Full Code of HumeAI/hume-api-examples for AI

main 3f42c8766603 cached
678 files
10.7 MB
2.8M tokens
787 symbols
1 requests
Copy disabled (too large) Download .txt
Showing preview only (11,328K chars total). Download the full file to get everything.
Repository: HumeAI/hume-api-examples
Branch: main
Commit: 3f42c8766603
Files: 678
Total size: 10.7 MB

Directory structure:
gitextract_vgmds2a9/

├── .github/
│   ├── dependabot.yml
│   └── workflows/
│       ├── dependabot-auto-merge.yml
│       └── test-examples.yml
├── .gitignore
├── Directory.Packages.props
├── LICENSE
├── README.md
├── evi/
│   ├── evi-dotnet-quickstart/
│   │   ├── .gitignore
│   │   ├── EviTests.cs
│   │   ├── Program.cs
│   │   ├── README.md
│   │   ├── evi-csharp-quickstart.csproj
│   │   ├── evi-csharp-quickstart.tests.csproj
│   │   └── sample_input.pcm
│   ├── evi-flutter/
│   │   ├── .gitignore
│   │   ├── README.md
│   │   ├── analysis_options.yaml
│   │   ├── android/
│   │   │   ├── .gitignore
│   │   │   ├── app/
│   │   │   │   ├── build.gradle
│   │   │   │   └── src/
│   │   │   │       ├── debug/
│   │   │   │       │   └── AndroidManifest.xml
│   │   │   │       ├── main/
│   │   │   │       │   ├── AndroidManifest.xml
│   │   │   │       │   ├── kotlin/
│   │   │   │       │   │   └── com/
│   │   │   │       │   │       └── example/
│   │   │   │       │   │           └── evi_example/
│   │   │   │       │   │               └── MainActivity.kt
│   │   │   │       │   └── res/
│   │   │   │       │       ├── drawable/
│   │   │   │       │       │   └── launch_background.xml
│   │   │   │       │       ├── drawable-v21/
│   │   │   │       │       │   └── launch_background.xml
│   │   │   │       │       ├── values/
│   │   │   │       │       │   └── styles.xml
│   │   │   │       │       └── values-night/
│   │   │   │       │           └── styles.xml
│   │   │   │       └── profile/
│   │   │   │           └── AndroidManifest.xml
│   │   │   ├── build.gradle
│   │   │   ├── gradle/
│   │   │   │   └── wrapper/
│   │   │   │       └── gradle-wrapper.properties
│   │   │   ├── gradle.properties
│   │   │   └── settings.gradle
│   │   ├── audio/
│   │   │   ├── .gitignore
│   │   │   ├── .metadata
│   │   │   ├── ios/
│   │   │   │   ├── .gitignore
│   │   │   │   ├── Assets/
│   │   │   │   │   └── .gitkeep
│   │   │   │   ├── Classes/
│   │   │   │   │   ├── AudioPlugin.swift
│   │   │   │   │   ├── Microphone.swift
│   │   │   │   │   └── SoundPlayer.swift
│   │   │   │   ├── Resources/
│   │   │   │   │   └── PrivacyInfo.xcprivacy
│   │   │   │   └── audio.podspec
│   │   │   ├── lib/
│   │   │   │   ├── audio.dart
│   │   │   │   ├── audio_method_channel.dart
│   │   │   │   ├── audio_platform_interface.dart
│   │   │   │   └── dart_audio.dart
│   │   │   ├── pubspec.yaml
│   │   │   └── test/
│   │   │       ├── audio_method_channel_test.dart
│   │   │       └── audio_test.dart
│   │   ├── ios/
│   │   │   ├── .gitignore
│   │   │   ├── Flutter/
│   │   │   │   ├── AppFrameworkInfo.plist
│   │   │   │   ├── Debug.xcconfig
│   │   │   │   └── Release.xcconfig
│   │   │   ├── Podfile
│   │   │   ├── Runner/
│   │   │   │   ├── AppDelegate.swift
│   │   │   │   ├── Assets.xcassets/
│   │   │   │   │   ├── AppIcon.appiconset/
│   │   │   │   │   │   └── Contents.json
│   │   │   │   │   └── LaunchImage.imageset/
│   │   │   │   │       ├── Contents.json
│   │   │   │   │       └── README.md
│   │   │   │   ├── Base.lproj/
│   │   │   │   │   ├── LaunchScreen.storyboard
│   │   │   │   │   └── Main.storyboard
│   │   │   │   ├── Info.plist
│   │   │   │   └── Runner-Bridging-Header.h
│   │   │   ├── Runner.xcodeproj/
│   │   │   │   ├── project.pbxproj
│   │   │   │   ├── project.xcworkspace/
│   │   │   │   │   ├── contents.xcworkspacedata
│   │   │   │   │   └── xcshareddata/
│   │   │   │   │       ├── IDEWorkspaceChecks.plist
│   │   │   │   │       └── WorkspaceSettings.xcsettings
│   │   │   │   └── xcshareddata/
│   │   │   │       └── xcschemes/
│   │   │   │           └── Runner.xcscheme
│   │   │   ├── Runner.xcworkspace/
│   │   │   │   ├── contents.xcworkspacedata
│   │   │   │   └── xcshareddata/
│   │   │   │       ├── IDEWorkspaceChecks.plist
│   │   │   │       └── WorkspaceSettings.xcsettings
│   │   │   ├── RunnerTests/
│   │   │   │   └── RunnerTests.swift
│   │   │   └── build/
│   │   │       └── ios/
│   │   │           └── XCBuildData/
│   │   │               └── PIFCache/
│   │   │                   └── workspace/
│   │   │                       └── WORKSPACE@v11_hash=(null)_subobjects=4483c0dac1d2a63621e8a5d74e580a19-json
│   │   ├── lib/
│   │   │   ├── chat_card.dart
│   │   │   ├── evi_message.dart
│   │   │   ├── main.dart
│   │   │   └── theme.dart
│   │   ├── pubspec.yaml
│   │   ├── test/
│   │   │   └── widget_test.dart
│   │   └── web/
│   │       ├── index.html
│   │       └── manifest.json
│   ├── evi-next-js-app-router-quickstart/
│   │   ├── .eslintrc.json
│   │   ├── .gitignore
│   │   ├── .prettierrc.json
│   │   ├── README.md
│   │   ├── app/
│   │   │   ├── actions/
│   │   │   │   └── set-llm-key.ts
│   │   │   ├── api-key/
│   │   │   │   └── page.tsx
│   │   │   ├── error.tsx
│   │   │   ├── globals.css
│   │   │   ├── layout.tsx
│   │   │   ├── page.tsx
│   │   │   └── session-settings/
│   │   │       └── page.tsx
│   │   ├── components/
│   │   │   ├── Chat.tsx
│   │   │   ├── ChatLoader.tsx
│   │   │   ├── Controls.tsx
│   │   │   ├── Expressions.tsx
│   │   │   ├── Messages.tsx
│   │   │   ├── MicFFT.tsx
│   │   │   ├── Nav.tsx
│   │   │   ├── StartCall.tsx
│   │   │   ├── logos/
│   │   │   │   ├── GitHub.tsx
│   │   │   │   └── Hume.tsx
│   │   │   └── ui/
│   │   │       ├── button.tsx
│   │   │       └── toggle.tsx
│   │   ├── components.json
│   │   ├── package.json
│   │   ├── playwright.config.ts
│   │   ├── postcss.config.mjs
│   │   ├── tailwind.config.ts
│   │   ├── test-results/
│   │   │   └── .last-run.json
│   │   ├── tests/
│   │   │   └── voice-react.spec.ts
│   │   ├── tsconfig.json
│   │   └── utils/
│   │       ├── e2e-hooks.ts
│   │       ├── expressionColors.ts
│   │       ├── index.ts
│   │       └── session-settings.ts
│   ├── evi-next-js-function-calling/
│   │   ├── .eslintrc.json
│   │   ├── .gitignore
│   │   ├── .prettierrc.json
│   │   ├── README.md
│   │   ├── app/
│   │   │   ├── api/
│   │   │   │   └── fetchWeather/
│   │   │   │       └── route.ts
│   │   │   ├── error.tsx
│   │   │   ├── globals.css
│   │   │   ├── layout.tsx
│   │   │   └── page.tsx
│   │   ├── components/
│   │   │   ├── Chat.tsx
│   │   │   ├── ChatLoader.tsx
│   │   │   ├── Controls.tsx
│   │   │   ├── Expressions.tsx
│   │   │   ├── Messages.tsx
│   │   │   ├── MicFFT.tsx
│   │   │   ├── Nav.tsx
│   │   │   ├── StartCall.tsx
│   │   │   ├── logos/
│   │   │   │   ├── GitHub.tsx
│   │   │   │   └── Hume.tsx
│   │   │   └── ui/
│   │   │       ├── button.tsx
│   │   │       └── toggle.tsx
│   │   ├── components.json
│   │   ├── package.json
│   │   ├── postcss.config.mjs
│   │   ├── tailwind.config.ts
│   │   ├── tsconfig.json
│   │   └── utils/
│   │       ├── expressionColors.ts
│   │       ├── fetchWeather.ts
│   │       └── index.ts
│   ├── evi-next-js-pages-router-quickstart/
│   │   ├── .eslintrc.json
│   │   ├── .gitignore
│   │   ├── .prettierrc.json
│   │   ├── README.md
│   │   ├── components/
│   │   │   ├── Chat.tsx
│   │   │   ├── Controls.tsx
│   │   │   ├── Expressions.tsx
│   │   │   ├── Messages.tsx
│   │   │   ├── MicFFT.tsx
│   │   │   ├── Nav.tsx
│   │   │   ├── StartCall.tsx
│   │   │   ├── logos/
│   │   │   │   ├── GitHub.tsx
│   │   │   │   └── Hume.tsx
│   │   │   └── ui/
│   │   │       ├── button.tsx
│   │   │       └── toggle.tsx
│   │   ├── components.json
│   │   ├── next.config.js
│   │   ├── package.json
│   │   ├── pages/
│   │   │   ├── 500.tsx
│   │   │   ├── _app.tsx
│   │   │   ├── _document.tsx
│   │   │   ├── api/
│   │   │   │   └── control-plane/
│   │   │   │       └── set-llm-key.ts
│   │   │   └── index.tsx
│   │   ├── postcss.config.mjs
│   │   ├── styles/
│   │   │   └── globals.css
│   │   ├── tailwind.config.ts
│   │   ├── tsconfig.json
│   │   └── utils/
│   │       ├── expressionColors.ts
│   │       └── index.ts
│   ├── evi-prompting-examples/
│   │   ├── README.md
│   │   ├── deeper_questions_prompt.txt
│   │   ├── default_prompt.txt
│   │   └── evi-3-default-prompt.txt
│   ├── evi-python-chat-history/
│   │   ├── .gitignore
│   │   ├── README.md
│   │   ├── main.py
│   │   ├── pyproject.toml
│   │   └── transcript_4d720063-d4ab-4407-ad22-e41079373d79.txt
│   ├── evi-python-clm-sse/
│   │   ├── README.md
│   │   ├── openai_sse.py
│   │   └── pyproject.toml
│   ├── evi-python-clm-wss/
│   │   ├── LICENSE
│   │   ├── README.md
│   │   ├── docs/
│   │   │   └── detailed-install-instructions-mac.md
│   │   ├── main.py
│   │   └── pyproject.toml
│   ├── evi-python-control-plane/
│   │   ├── LICENSE
│   │   ├── README.md
│   │   ├── main.py
│   │   └── pyproject.toml
│   ├── evi-python-function-calling/
│   │   ├── .gitignore
│   │   ├── LICENSE
│   │   ├── README.md
│   │   ├── main.py
│   │   └── utils.py
│   ├── evi-python-phone-calling-proxy-server/
│   │   ├── .gitignore
│   │   ├── README.md
│   │   ├── app.py
│   │   ├── audio_processors/
│   │   │   ├── __init__.py
│   │   │   ├── evi_audio_processor.py
│   │   │   └── twilio_audio_processor.py
│   │   ├── pyproject.toml
│   │   └── tools.py
│   ├── evi-python-quickstart/
│   │   ├── .gitignore
│   │   ├── LICENSE
│   │   ├── README.md
│   │   ├── conftest.py
│   │   ├── pyproject.toml
│   │   ├── quickstart.py
│   │   └── test_quickstart.py
│   ├── evi-python-raw-api/
│   │   ├── .gitignore
│   │   ├── LICENSE
│   │   ├── README.md
│   │   ├── requirements_linux.txt
│   │   ├── requirements_mac.txt
│   │   └── src/
│   │       ├── authenticator.py
│   │       ├── connection.py
│   │       ├── devices.py
│   │       └── main.py
│   ├── evi-python-webhooks/
│   │   ├── .gitignore
│   │   ├── README.md
│   │   ├── app.py
│   │   ├── pyproject.toml
│   │   └── utils.py
│   ├── evi-python-wss-clm-endpoint/
│   │   ├── .dockerignore
│   │   ├── Dockerfile
│   │   ├── README.md
│   │   ├── agent.py
│   │   ├── app.py
│   │   ├── cdk/
│   │   │   ├── README.md
│   │   │   ├── app.py
│   │   │   ├── cdk/
│   │   │   │   ├── __init__.py
│   │   │   │   └── eliza_stack.py
│   │   │   ├── cdk.json
│   │   │   ├── cdk.out/
│   │   │   │   ├── ElizaStack.assets.json
│   │   │   │   ├── ElizaStack.template.json
│   │   │   │   ├── asset.689e46f5ffafa1e0f81f114b5dfd7694d2d1e291d9bd855e4f7b601d2b2403d0/
│   │   │   │   │   ├── .dockerignore
│   │   │   │   │   ├── Dockerfile
│   │   │   │   │   ├── README.md
│   │   │   │   │   ├── agent.py
│   │   │   │   │   ├── app.py
│   │   │   │   │   ├── modal/
│   │   │   │   │   │   ├── README.md
│   │   │   │   │   │   └── modal_app.py
│   │   │   │   │   └── pyproject.toml
│   │   │   │   ├── asset.ee7de53d64cc9d6248fa6aa550f92358f6c907b5efd6f3298aeab1b5e7ea358a/
│   │   │   │   │   ├── __entrypoint__.js
│   │   │   │   │   └── index.js
│   │   │   │   ├── asset.f372550efb96be7f549f1d0346e8646080c1fe2b15c7c2e3b1dceb07b6656c54/
│   │   │   │   │   ├── .dockerignore
│   │   │   │   │   ├── Dockerfile
│   │   │   │   │   ├── README.md
│   │   │   │   │   ├── agent.py
│   │   │   │   │   ├── app.py
│   │   │   │   │   ├── modal_app.py
│   │   │   │   │   └── pyproject.toml
│   │   │   │   ├── cdk.out
│   │   │   │   ├── manifest.json
│   │   │   │   └── tree.json
│   │   │   └── requirements.txt
│   │   ├── modal/
│   │   │   ├── README.md
│   │   │   └── modal_app.py
│   │   ├── modal_app.py
│   │   └── pyproject.toml
│   ├── evi-react-native/
│   │   ├── .gitignore
│   │   ├── App.tsx
│   │   ├── README.md
│   │   ├── VoiceIsolationModePrompt.tsx
│   │   ├── app.json
│   │   ├── eslint.config.js
│   │   ├── index.ts
│   │   ├── metro.config.js
│   │   ├── modules/
│   │   │   └── audio/
│   │   │       ├── expo-module.config.json
│   │   │       ├── index.ts
│   │   │       └── src/
│   │   │           ├── AudioModule.ts
│   │   │           ├── AudioModule.types.ts
│   │   │           └── AudioModule.web.ts
│   │   ├── package.json
│   │   ├── polyfills.ts
│   │   └── tsconfig.json
│   ├── evi-swift-chat/
│   │   ├── .gitignore
│   │   ├── HumeDemo/
│   │   │   ├── Assets.xcassets/
│   │   │   │   ├── AccentColor.colorset/
│   │   │   │   │   └── Contents.json
│   │   │   │   ├── AppIcon.appiconset/
│   │   │   │   │   └── Contents.json
│   │   │   │   ├── Contents.json
│   │   │   │   └── Logo.imageset/
│   │   │   │       └── Contents.json
│   │   │   ├── EVIDemo/
│   │   │   │   ├── Clients/
│   │   │   │   │   └── AccessTokenClient.swift
│   │   │   │   ├── Extensions/
│   │   │   │   │   └── Dictionary+Additions.swift
│   │   │   │   ├── Mocks.swift
│   │   │   │   ├── Rows/
│   │   │   │   │   ├── DetailedRow.swift
│   │   │   │   │   └── MessageRow.swift
│   │   │   │   └── Views/
│   │   │   │       ├── Components/
│   │   │   │       │   ├── EventRowView.swift
│   │   │   │       │   └── RowView.swift
│   │   │   │       ├── EVIChatView.swift
│   │   │   │       ├── Models/
│   │   │   │       │   └── EVIChatModel.swift
│   │   │   │       └── Modifiers/
│   │   │   │           └── FlippedUpsideDown.swift
│   │   │   ├── HumeDemoApp.swift
│   │   │   ├── Info.plist
│   │   │   └── Preview Content/
│   │   │       ├── EVIChatModel+Previews.swift
│   │   │       └── Preview Assets.xcassets/
│   │   │           └── Contents.json
│   │   ├── HumeDemo.xcodeproj/
│   │   │   ├── project.pbxproj
│   │   │   └── xcshareddata/
│   │   │       └── xcschemes/
│   │   │           └── HumeDemo.xcscheme
│   │   ├── README.md
│   │   └── access_token_service/
│   │       ├── README.md
│   │       ├── requirements.txt
│   │       └── run_token_service.py
│   ├── evi-touchdesigner/
│   │   ├── .gitignore
│   │   ├── HumeTD.tox
│   │   ├── HumeTDDemo.toe
│   │   ├── README.md
│   │   └── Scripts/
│   │       ├── HumeTD.py
│   │       └── MessagePlaback.py
│   ├── evi-typescript-chat-history/
│   │   ├── .gitignore
│   │   ├── README.md
│   │   ├── package.json
│   │   ├── src/
│   │   │   └── index.ts
│   │   └── tsconfig.json
│   ├── evi-typescript-function-calling/
│   │   ├── .gitignore
│   │   ├── README.md
│   │   ├── index.html
│   │   ├── package.json
│   │   ├── src/
│   │   │   ├── handleToolCall.ts
│   │   │   ├── main.ts
│   │   │   ├── styles.css
│   │   │   └── vite-env.d.ts
│   │   └── tsconfig.json
│   ├── evi-typescript-proxy/
│   │   ├── .gitignore
│   │   ├── README.md
│   │   ├── app/
│   │   │   ├── api.ts
│   │   │   ├── cli.ts
│   │   │   ├── downstream.ts
│   │   │   ├── main.ts
│   │   │   ├── package.json
│   │   │   ├── tsconfig.json
│   │   │   ├── upstream.ts
│   │   │   └── util.ts
│   │   ├── shared/
│   │   │   └── types.mts
│   │   └── web/
│   │       ├── .gitignore
│   │       ├── ChatControls.tsx
│   │       ├── ChatMessages.tsx
│   │       ├── EVIChat.tsx
│   │       ├── StartCall.tsx
│   │       ├── WebSocketControls.tsx
│   │       ├── app.tsx
│   │       ├── index.html
│   │       ├── package.json
│   │       ├── styles.css
│   │       ├── tsconfig.json
│   │       └── useProxyState.ts
│   ├── evi-typescript-quickstart/
│   │   ├── .gitignore
│   │   ├── .prettierrc.json
│   │   ├── README.md
│   │   ├── index.html
│   │   ├── package.json
│   │   ├── src/
│   │   │   ├── lib/
│   │   │   │   ├── audio.ts
│   │   │   │   ├── evi.test.ts
│   │   │   │   ├── evi.ts
│   │   │   │   ├── index.ts
│   │   │   │   └── ui.ts
│   │   │   ├── main.ts
│   │   │   ├── styles/
│   │   │   │   └── globals.css
│   │   │   └── vite-env.d.ts
│   │   ├── tsconfig.json
│   │   └── vitest.config.ts
│   ├── evi-typescript-webhooks/
│   │   ├── .gitignore
│   │   ├── README.md
│   │   ├── package.json
│   │   ├── src/
│   │   │   ├── main.ts
│   │   │   └── util.ts
│   │   └── tsconfig.json
│   ├── evi-unity-quickstart/
│   │   ├── .gitignore
│   │   ├── Assets/
│   │   │   ├── DefaultScene.unity
│   │   │   ├── DefaultScene.unity.meta
│   │   │   ├── Editor/
│   │   │   │   └── AutoLoadDefaultScene.cs
│   │   │   ├── Plugins/
│   │   │   │   ├── Microsoft.Extensions.DependencyInjection.Abstractions.dll.meta
│   │   │   │   ├── Microsoft.Extensions.Logging.Abstractions.dll.meta
│   │   │   │   └── Microsoft.IO.RecyclableMemoryStream.dll.meta
│   │   │   ├── Plugins.meta
│   │   │   ├── Scripts/
│   │   │   │   ├── HumeEVI.cs
│   │   │   │   ├── HumeEVI.cs.meta
│   │   │   │   ├── SceneBuilder.cs
│   │   │   │   └── SceneBuilder.cs.meta
│   │   │   └── Scripts.meta
│   │   ├── Packages/
│   │   │   └── manifest.json
│   │   ├── ProjectSettings/
│   │   │   ├── AudioManager.asset
│   │   │   ├── ClusterInputManager.asset
│   │   │   ├── DynamicsManager.asset
│   │   │   ├── EditorBuildSettings.asset
│   │   │   ├── EditorSettings.asset
│   │   │   ├── GraphicsSettings.asset
│   │   │   ├── InputManager.asset
│   │   │   ├── MemorySettings.asset
│   │   │   ├── MultiplayerManager.asset
│   │   │   ├── NavMeshAreas.asset
│   │   │   ├── PackageManagerSettings.asset
│   │   │   ├── Physics2DSettings.asset
│   │   │   ├── PresetManager.asset
│   │   │   ├── ProjectSettings.asset
│   │   │   ├── ProjectVersion.txt
│   │   │   ├── QualitySettings.asset
│   │   │   ├── SceneTemplateSettings.json
│   │   │   ├── TagManager.asset
│   │   │   ├── TimeManager.asset
│   │   │   ├── UnityConnectSettings.asset
│   │   │   ├── VFXManager.asset
│   │   │   └── VersionControlSettings.asset
│   │   └── README.md
│   └── evi-vue-widget/
│       ├── .gitignore
│       ├── .nvmrc
│       ├── README.md
│       ├── index.html
│       ├── package.json
│       ├── src/
│       │   ├── App.vue
│       │   ├── components/
│       │   │   └── HumeEmbed.vue
│       │   ├── main.ts
│       │   ├── style.css
│       │   └── vite-env.d.ts
│       ├── tsconfig.json
│       ├── tsconfig.node.json
│       └── vite.config.ts
├── expression-measurement/
│   ├── batch/
│   │   ├── next-js-emotional-language/
│   │   │   ├── .env.example
│   │   │   ├── .eslintrc.json
│   │   │   ├── .gitignore
│   │   │   ├── .prettierrc
│   │   │   ├── README.md
│   │   │   ├── next.config.js
│   │   │   ├── package.json
│   │   │   ├── postcss.config.js
│   │   │   ├── src/
│   │   │   │   ├── components/
│   │   │   │   │   ├── Introduction.tsx
│   │   │   │   │   ├── TextRender.tsx
│   │   │   │   │   └── Tooltip.tsx
│   │   │   │   ├── lib/
│   │   │   │   │   ├── client.ts
│   │   │   │   │   ├── env.ts
│   │   │   │   │   ├── mutations/
│   │   │   │   │   │   └── processTextFile.ts
│   │   │   │   │   ├── schemas/
│   │   │   │   │   │   └── index.ts
│   │   │   │   │   └── utils.ts
│   │   │   │   ├── pages/
│   │   │   │   │   ├── _app.tsx
│   │   │   │   │   ├── _document.tsx
│   │   │   │   │   ├── api/
│   │   │   │   │   │   ├── results.ts
│   │   │   │   │   │   └── send.ts
│   │   │   │   │   └── index.tsx
│   │   │   │   └── styles/
│   │   │   │       └── globals.css
│   │   │   ├── tailwind.config.js
│   │   │   └── tsconfig.json
│   │   ├── python-top-emotions/
│   │   │   ├── README.md
│   │   │   └── top_emotions.py
│   │   └── typescript-raw-text-processor/
│   │       ├── .gitignore
│   │       ├── README.md
│   │       ├── package.json
│   │       ├── src/
│   │       │   ├── index.test.ts
│   │       │   └── index.ts
│   │       └── tsconfig.json
│   ├── streaming/
│   │   ├── next-js-streaming-example/
│   │   │   ├── .gitignore
│   │   │   ├── README.md
│   │   │   ├── components/
│   │   │   │   ├── inputs/
│   │   │   │   │   ├── Button.tsx
│   │   │   │   │   ├── TextArea.tsx
│   │   │   │   │   └── TextBox.tsx
│   │   │   │   ├── menu/
│   │   │   │   │   ├── Auth.tsx
│   │   │   │   │   ├── Login.tsx
│   │   │   │   │   ├── Nav.tsx
│   │   │   │   │   └── Toolbar.tsx
│   │   │   │   └── widgets/
│   │   │   │       ├── AudioWidgets.tsx
│   │   │   │       ├── BurstWidgets.tsx
│   │   │   │       ├── Descriptor.tsx
│   │   │   │       ├── DiscreteTimeline.tsx
│   │   │   │       ├── FaceTrackedVideo.tsx
│   │   │   │       ├── FaceWidgets.tsx
│   │   │   │       ├── LanguageWidgets.tsx
│   │   │   │       ├── Loader.tsx
│   │   │   │       ├── LoaderSet.tsx
│   │   │   │       ├── ProsodyWidgets.tsx
│   │   │   │       └── TopEmotions.tsx
│   │   │   ├── lib/
│   │   │   │   ├── data/
│   │   │   │   │   ├── audioPrediction.ts
│   │   │   │   │   ├── boundingBox.ts
│   │   │   │   │   ├── characterRange.ts
│   │   │   │   │   ├── embedding.ts
│   │   │   │   │   ├── emotion.ts
│   │   │   │   │   ├── facePrediction.ts
│   │   │   │   │   ├── languagePrediction.ts
│   │   │   │   │   ├── range.ts
│   │   │   │   │   ├── timeRange.ts
│   │   │   │   │   └── trackedFace.ts
│   │   │   │   ├── hooks/
│   │   │   │   │   ├── keyPress.ts
│   │   │   │   │   ├── stability.ts
│   │   │   │   │   └── storage.ts
│   │   │   │   ├── media/
│   │   │   │   │   ├── audioRecorder.ts
│   │   │   │   │   └── videoRecorder.ts
│   │   │   │   └── utilities/
│   │   │   │       ├── asyncUtilities.ts
│   │   │   │       ├── blobUtilities.ts
│   │   │   │       ├── embeddingUtilities.ts
│   │   │   │       ├── emotionUtilities.ts
│   │   │   │       ├── environmentUtilities.ts
│   │   │   │       ├── scalingUtilities.ts
│   │   │   │       ├── styleUtilities.ts
│   │   │   │       └── typeUtilities.ts
│   │   │   ├── next.config.js
│   │   │   ├── package.json
│   │   │   ├── pages/
│   │   │   │   ├── _app.tsx
│   │   │   │   ├── burst/
│   │   │   │   │   ├── index.tsx
│   │   │   │   │   └── timeline/
│   │   │   │   │       └── index.tsx
│   │   │   │   ├── face/
│   │   │   │   │   ├── calibrate/
│   │   │   │   │   │   └── index.tsx
│   │   │   │   │   └── index.tsx
│   │   │   │   ├── index.tsx
│   │   │   │   ├── language/
│   │   │   │   │   └── index.tsx
│   │   │   │   └── prosody/
│   │   │   │       └── index.tsx
│   │   │   ├── postcss.config.js
│   │   │   ├── styles/
│   │   │   │   └── globals.css
│   │   │   ├── tailwind.config.js
│   │   │   └── tsconfig.json
│   │   └── python-streaming-example/
│   │       ├── .gitignore
│   │       ├── README.md
│   │       ├── main.py
│   │       ├── pyproject.toml
│   │       └── test_main.py
│   └── visualization-example/
│       ├── example-notebook.ipynb
│       └── predictions.json
├── monorepo.code-workspace
└── tts/
    ├── tts-dotnet-quickstart/
    │   ├── .gitignore
    │   ├── Program.cs
    │   ├── README.md
    │   ├── StreamingTtsService.cs
    │   ├── TtsTests.cs
    │   ├── tts-csharp-quickstart.csproj
    │   └── tts-csharp-quickstart.tests.csproj
    ├── tts-next-js-agora/
    │   ├── .gitignore
    │   ├── README.md
    │   ├── app/
    │   │   ├── api/
    │   │   │   ├── generate-agora-token/
    │   │   │   │   └── route.ts
    │   │   │   ├── invite-agent/
    │   │   │   │   └── route.ts
    │   │   │   └── stop-conversation/
    │   │   │       └── route.ts
    │   │   ├── globals.css
    │   │   ├── layout.tsx
    │   │   └── page.tsx
    │   ├── components/
    │   │   ├── AudioVisualizer.tsx
    │   │   ├── ConversationComponent.tsx
    │   │   ├── ConvoTextStream.tsx
    │   │   └── MicrophoneButton.tsx
    │   ├── env.example
    │   ├── eslint.config.mjs
    │   ├── lib/
    │   │   └── message.ts
    │   ├── next.config.ts
    │   ├── package.json
    │   ├── tsconfig.json
    │   └── types/
    │       ├── agora-rtc-react.d.ts
    │       ├── agora-token.d.ts
    │       └── conversation.ts
    ├── tts-next-js-chat/
    │   ├── .gitignore
    │   ├── README.md
    │   ├── eslint.config.mjs
    │   ├── next.config.ts
    │   ├── package.json
    │   ├── postcss.config.mjs
    │   ├── src/
    │   │   └── app/
    │   │       ├── api/
    │   │       │   ├── chat/
    │   │       │   │   └── route.ts
    │   │       │   ├── transcribe/
    │   │       │   │   └── route.ts
    │   │       │   ├── tts/
    │   │       │   │   └── route.ts
    │   │       │   └── voices/
    │   │       │       └── route.ts
    │   │       ├── components/
    │   │       │   ├── AudioPlayer.tsx
    │   │       │   ├── Chat.tsx
    │   │       │   ├── ControlsPanel.tsx
    │   │       │   ├── VoiceSelector.tsx
    │   │       │   └── logos/
    │   │       │       └── Hume.tsx
    │   │       ├── context/
    │   │       │   └── VoiceSettingsContext.tsx
    │   │       ├── globals.css
    │   │       ├── hooks/
    │   │       │   ├── useRecording.ts
    │   │       │   ├── useTts.ts
    │   │       │   └── useVoices.ts
    │   │       ├── layout.tsx
    │   │       ├── lib/
    │   │       │   └── humeClient.ts
    │   │       └── page.tsx
    │   └── tsconfig.json
    ├── tts-next-js-vercel-ai-sdk/
    │   ├── .gitignore
    │   ├── README.md
    │   ├── next.config.ts
    │   ├── package.json
    │   ├── postcss.config.mjs
    │   ├── src/
    │   │   ├── actions/
    │   │   │   ├── generate-speech.ts
    │   │   │   └── list-voices.ts
    │   │   ├── app/
    │   │   │   ├── globals.css
    │   │   │   ├── layout.tsx
    │   │   │   └── page.tsx
    │   │   ├── components/
    │   │   │   ├── AudioClipCard.tsx
    │   │   │   ├── AudioGallery.tsx
    │   │   │   ├── TextAreaField.tsx
    │   │   │   ├── TtsForm.tsx
    │   │   │   ├── VoiceSelect.tsx
    │   │   │   └── logos/
    │   │   │       └── Hume.tsx
    │   │   ├── hooks/
    │   │   │   └── useVoices.ts
    │   │   └── types/
    │   │       └── clip.ts
    │   └── tsconfig.json
    ├── tts-python-livekit/
    │   ├── .gitignore
    │   ├── .python-version
    │   ├── README.md
    │   ├── pyproject.toml
    │   └── src/
    │       ├── __init__.py
    │       ├── agent_session/
    │       │   ├── __init__.py
    │       │   ├── constants.py
    │       │   └── main.py
    │       ├── standalone_tts/
    │       │   ├── __init__.py
    │       │   └── main.py
    │       └── utils.py
    ├── tts-python-quickstart/
    │   ├── .gitignore
    │   ├── README.md
    │   ├── app.py
    │   ├── conftest.py
    │   ├── pyproject.toml
    │   └── test_app.py
    ├── tts-swift-quickstart/
    │   ├── .gitignore
    │   ├── HumeDemo/
    │   │   ├── Assets.xcassets/
    │   │   │   ├── AccentColor.colorset/
    │   │   │   │   └── Contents.json
    │   │   │   ├── AppIcon.appiconset/
    │   │   │   │   └── Contents.json
    │   │   │   ├── Contents.json
    │   │   │   └── Logo.imageset/
    │   │   │       └── Contents.json
    │   │   ├── HumeDemoApp.swift
    │   │   ├── Info.plist
    │   │   ├── Preview Content/
    │   │   │   └── Preview Assets.xcassets/
    │   │   │       └── Contents.json
    │   │   └── TTSDemo/
    │   │       ├── Clients/
    │   │       │   └── AccessTokenClient.swift
    │   │       ├── Extensions/
    │   │       │   └── Dictionary+Additions.swift
    │   │       ├── Mocks.swift
    │   │       └── Views/
    │   │           ├── Components/
    │   │           │   ├── RowView.swift
    │   │           │   └── TTSEventView.swift
    │   │           ├── Models/
    │   │           │   ├── TTSEvent.swift
    │   │           │   ├── TTSModel+Types.swift
    │   │           │   └── TTSModel.swift
    │   │           ├── Modifiers/
    │   │           │   └── FlippedUpsideDown.swift
    │   │           └── TTSView.swift
    │   ├── HumeDemo.xcodeproj/
    │   │   ├── project.pbxproj
    │   │   └── xcshareddata/
    │   │       └── xcschemes/
    │   │           └── HumeDemo.xcscheme
    │   ├── README.md
    │   └── access_token_service/
    │       ├── README.md
    │       ├── requirements.txt
    │       └── run_token_service.py
    ├── tts-typescript-lipsync/
    │   ├── .gitignore
    │   ├── README.md
    │   ├── index.html
    │   ├── index.tsx
    │   ├── mouth.ts
    │   ├── package.json
    │   ├── tsconfig.json
    │   └── vite-env.d.ts
    ├── tts-typescript-quickstart/
    │   ├── .gitignore
    │   ├── README.md
    │   ├── audio_player.ts
    │   ├── index.test.ts
    │   ├── index.ts
    │   ├── package.json
    │   ├── pnpm-workspace.yaml
    │   ├── tsconfig.json
    │   ├── vite-env.d.ts
    │   ├── vite.config.ts
    │   └── vitest.config.ts
    └── tts-unity-quickstart/
        ├── .gitignore
        ├── Assets/
        │   ├── DefaultScene.unity
        │   ├── DefaultScene.unity.meta
        │   ├── Scripts/
        │   │   ├── HumeSpeaker.cs
        │   │   ├── HumeSpeaker.cs.meta
        │   │   ├── SceneBuilder.cs
        │   │   └── SceneBuilder.cs.meta
        │   └── Scripts.meta
        ├── Packages/
        │   └── manifest.json
        ├── ProjectSettings/
        │   ├── AudioManager.asset
        │   ├── ClusterInputManager.asset
        │   ├── DynamicsManager.asset
        │   ├── EditorBuildSettings.asset
        │   ├── EditorSettings.asset
        │   ├── GraphicsSettings.asset
        │   ├── InputManager.asset
        │   ├── MemorySettings.asset
        │   ├── MultiplayerManager.asset
        │   ├── NavMeshAreas.asset
        │   ├── PackageManagerSettings.asset
        │   ├── Physics2DSettings.asset
        │   ├── PresetManager.asset
        │   ├── ProjectSettings.asset
        │   ├── ProjectVersion.txt
        │   ├── QualitySettings.asset
        │   ├── SceneTemplateSettings.json
        │   ├── TagManager.asset
        │   ├── TimeManager.asset
        │   ├── UnityConnectSettings.asset
        │   ├── VFXManager.asset
        │   └── VersionControlSettings.asset
        └── README.md

================================================
FILE CONTENTS
================================================

================================================
FILE: .github/dependabot.yml
================================================
version: 2

updates:
  # check for updated versions of GitHub Actions on a weekly basis
  - package-ecosystem: 'github-actions'
    directory: '/'
    schedule:
      interval: 'weekly'
      day: 'monday'
      time: '06:00'
      timezone: 'America/New_York'
    commit-message:
      prefix: '[github actions] '
    open-pull-requests-limit: 20

  # check for updated versions of npm dependencies on a daily basis
  - package-ecosystem: 'npm'
    directories:
      - '/evi/evi-next-js-app-router-quickstart'
      - '/evi/evi-next-js-function-calling'
      - '/evi/evi-next-js-pages-router-quickstart'
      - '/evi/evi-typescript-chat-history'
      - '/evi/evi-typescript-function-calling'
      - '/evi/evi-typescript-quickstart'
      - '/evi/evi-typescript-webhooks'
      - '/evi/evi-vue-widget'
      - '/tts/tts-next-js-agora'
      - '/tts/tts-next-js-vercel-ai-sdk'
      - '/tts/tts-typescript-lipsync'
      - '/tts/tts-typescript-quickstart'
    schedule:
      interval: 'daily'
      time: '06:00'
      timezone: 'America/New_York'
    commit-message:
      prefix: '[npm] '
    open-pull-requests-limit: 20
    versioning-strategy: 'increase'

  # check for updated versions of NuGet (.NET) dependencies on a daily basis
  - package-ecosystem: 'nuget'
    directories:
      - '/evi/evi-dotnet-quickstart'
      - '/tts/tts-dotnet-quickstart'
    schedule:
      interval: 'daily'
      time: '06:00'
      timezone: 'America/New_York'
    commit-message:
      prefix: '[nuget] '
    open-pull-requests-limit: 20

  # check for updated versions of pip dependencies on a daily basis
  # (excludes uv-based projects — the 'pip' ecosystem does not update
  # uv.lock; those projects are handled by the 'uv' entry below)
  - package-ecosystem: 'pip'
    directories:
      - '/evi/evi-python-chat-history'
      - '/evi/evi-python-webhooks'
      - '/evi/evi-python-wss-clm-endpoint'
    schedule:
      interval: 'daily'
      time: '06:00'
      timezone: 'America/New_York'
    commit-message:
      prefix: '[pip] '
    open-pull-requests-limit: 20

  # uv-based Python projects (pyproject.toml + uv.lock)
  # The 'uv' ecosystem updates both the manifest and uv.lock, unlike 'pip' above
  - package-ecosystem: 'uv'
    directories:
      - '/evi/evi-python-quickstart'
      - '/evi/evi-python-clm-sse'
      - '/evi/evi-python-clm-wss'
      - '/evi/evi-python-control-plane'
      - '/evi/evi-python-phone-calling-proxy-server'
      - '/tts/tts-python-quickstart'
      - '/tts/tts-python-livekit'
      - '/expression-measurement/streaming/python-streaming-example'
    schedule:
      interval: 'daily'
      time: '06:00'
      timezone: 'America/New_York'
    commit-message:
      prefix: '[uv] '
    open-pull-requests-limit: 20


================================================
FILE: .github/workflows/dependabot-auto-merge.yml
================================================
name: Dependabot auto-merge

# Automatically approves Dependabot PRs and enables auto-merge so they land
# once required checks pass.
# NOTE(review): pull_request_target runs with write permissions against the
# base repo; the `github.actor == 'dependabot[bot]'` guard below is what keeps
# this safe — do not loosen it.
on:
  pull_request_target:
    types: [opened, reopened, ready_for_review, synchronize]

permissions:
  contents: write
  pull-requests: write

jobs:
  dependabot-auto-merge:
    runs-on: ubuntu-latest
    # Only act on PRs opened by Dependabot itself.
    if: github.actor == 'dependabot[bot]'

    steps:
      # Fetches PR metadata (update-type, ecosystem, …). The outputs are not
      # consumed below today; they are available as steps.metadata.outputs.*
      # should auto-merge ever need gating on e.g. semver update type.
      - name: Dependabot metadata
        id: metadata
        uses: dependabot/fetch-metadata@v3.1.0
        with:
          github-token: ${{ secrets.GITHUB_TOKEN }}

      - name: Approve PR
        env:
          PR_URL: ${{ github.event.pull_request.html_url }}
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        run: gh pr review --approve "$PR_URL"

      # --auto defers the merge until all required status checks succeed.
      - name: Enable auto-merge for Dependabot PRs
        env:
          PR_URL: ${{ github.event.pull_request.html_url }}
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        run: gh pr merge --auto --squash "$PR_URL"


================================================
FILE: .github/workflows/test-examples.yml
================================================
name: test-examples

# Builds/tests the example projects. PR and push runs only exercise projects
# whose files changed (see the detect-changes job); manual dispatch and the
# weekly cron run every job that opts in via its job-level `if`.
on:
  pull_request:
    types: [opened, synchronize, reopened]
  push:
    branches: [main, master]
  workflow_dispatch:
  schedule:
    # Run tests for all packages that have tests weekly on Tuesday
    - cron: '0 12 * * 2'

permissions:
  contents: read

jobs:
  # Computes one boolean output per example project — true when any file under
  # that project's directory changed. Every downstream job gates on one of
  # these outputs in its `if:` condition.
  detect-changes:
    runs-on: ubuntu-latest
    outputs:
      evi_py_quickstart: ${{ steps.filter.outputs.evi_py_quickstart }}
      evi_py_chat_history: ${{ steps.filter.outputs.evi_py_chat_history }}
      evi_py_clm_sse: ${{ steps.filter.outputs.evi_py_clm_sse }}
      evi_py_clm_wss: ${{ steps.filter.outputs.evi_py_clm_wss }}
      evi_py_control_plane: ${{ steps.filter.outputs.evi_py_control_plane }}
      evi_py_phone_calling: ${{ steps.filter.outputs.evi_py_phone_calling }}
      evi_py_webhooks: ${{ steps.filter.outputs.evi_py_webhooks }}
      evi_py_wss_clm_endpoint: ${{ steps.filter.outputs.evi_py_wss_clm_endpoint }}
      tts_py_livekit: ${{ steps.filter.outputs.tts_py_livekit }}
      tts_py_quickstart: ${{ steps.filter.outputs.tts_py_quickstart }}
      evi_app_router: ${{ steps.filter.outputs.evi_app_router }}
      evi_function_calling: ${{ steps.filter.outputs.evi_function_calling }}
      evi_pages_router: ${{ steps.filter.outputs.evi_pages_router }}
      evi_react_native: ${{ steps.filter.outputs.evi_react_native }}
      evi_ts_chat_history: ${{ steps.filter.outputs.evi_ts_chat_history }}
      evi_ts_function_calling: ${{ steps.filter.outputs.evi_ts_function_calling }}
      evi_ts_quickstart: ${{ steps.filter.outputs.evi_ts_quickstart }}
      evi_ts_webhooks: ${{ steps.filter.outputs.evi_ts_webhooks }}
      evi_vue_widget: ${{ steps.filter.outputs.evi_vue_widget }}
      tts_next_agora: ${{ steps.filter.outputs.tts_next_agora }}
      tts_next_vercel_ai_sdk: ${{ steps.filter.outputs.tts_next_vercel_ai_sdk }}
      tts_ts_lipsync: ${{ steps.filter.outputs.tts_ts_lipsync }}
      tts_ts_quickstart: ${{ steps.filter.outputs.tts_ts_quickstart }}
      evi_dotnet_quickstart: ${{ steps.filter.outputs.evi_dotnet_quickstart }}
      tts_dotnet_quickstart: ${{ steps.filter.outputs.tts_dotnet_quickstart }}
      exp_meas_ts_raw_text_processor: ${{ steps.filter.outputs.exp_meas_ts_raw_text_processor }}
      exp_meas_py_streaming: ${{ steps.filter.outputs.exp_meas_py_streaming }}
    steps:
      # Full history so paths-filter can diff against the merge base.
      - name: Checkout
        uses: actions/checkout@v6
        with:
          fetch-depth: 0

      # One filter key per project; keys must match the outputs block above.
      - name: Detect changed folders
        id: filter
        uses: dorny/paths-filter@v4
        with:
          filters: |
            evi_py_quickstart:
              - 'evi/evi-python-quickstart/**'
            evi_py_chat_history:
              - 'evi/evi-python-chat-history/**'
            evi_py_clm_sse:
              - 'evi/evi-python-clm-sse/**'
            evi_py_clm_wss:
              - 'evi/evi-python-clm-wss/**'
            evi_py_control_plane:
              - 'evi/evi-python-control-plane/**'
            evi_py_phone_calling:
              - 'evi/evi-python-phone-calling-proxy-server/**'
            evi_py_webhooks:
              - 'evi/evi-python-webhooks/**'
            evi_py_wss_clm_endpoint:
              - 'evi/evi-python-wss-clm-endpoint/**'
            tts_py_livekit:
              - 'tts/tts-python-livekit/**'
            tts_py_quickstart:
              - 'tts/tts-python-quickstart/**'
            evi_app_router:
              - 'evi/evi-next-js-app-router-quickstart/**'
            evi_function_calling:
              - 'evi/evi-next-js-function-calling/**'
            evi_pages_router:
              - 'evi/evi-next-js-pages-router-quickstart/**'
            evi_react_native:
              - 'evi/evi-react-native/**'
            evi_ts_chat_history:
              - 'evi/evi-typescript-chat-history/**'
            evi_ts_function_calling:
              - 'evi/evi-typescript-function-calling/**'
            evi_ts_quickstart:
              - 'evi/evi-typescript-quickstart/**'
            evi_ts_webhooks:
              - 'evi/evi-typescript-webhooks/**'
            evi_vue_widget:
              - 'evi/evi-vue-widget/**'
            tts_next_agora:
              - 'tts/tts-next-js-agora/**'
            tts_next_vercel_ai_sdk:
              - 'tts/tts-next-js-vercel-ai-sdk/**'
            tts_ts_lipsync:
              - 'tts/tts-typescript-lipsync/**'
            tts_ts_quickstart:
              - 'tts/tts-typescript-quickstart/**'
            evi_dotnet_quickstart:
              - 'evi/evi-dotnet-quickstart/**'
            tts_dotnet_quickstart:
              - 'tts/tts-dotnet-quickstart/**'
            exp_meas_ts_raw_text_processor:
              - 'expression-measurement/batch/typescript-raw-text-processor/**'
            exp_meas_py_streaming:
              - 'expression-measurement/streaming/python-streaming-example/**'

  # End-to-end test of the TypeScript EVI quickstart: install, build, then run
  # its pnpm test suite (needs TEST_HUME_* secrets). Also runs on the weekly
  # schedule and on manual dispatch, not only on path changes.
  test-evi-typescript-quickstart:
    needs: detect-changes
    if: github.event_name == 'workflow_dispatch' || github.event_name == 'schedule' || needs.detect-changes.outputs.evi_ts_quickstart == 'true'
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v6

      - name: Setup pnpm
        uses: pnpm/action-setup@v6
        with:
          version: 10.20.0

      - name: Setup Node
        uses: actions/setup-node@v6
        with:
          node-version: 20

      - name: Install deps (EVI quickstart)
        working-directory: evi/evi-typescript-quickstart
        run: pnpm install --frozen-lockfile

      - name: Build
        working-directory: evi/evi-typescript-quickstart
        run: pnpm run build

      - name: Run tests in evi-typescript-quickstart
        working-directory: evi/evi-typescript-quickstart
        env:
          TEST_HUME_API_KEY: ${{ secrets.TEST_HUME_API_KEY }}
          TEST_HUME_SECRET_KEY: ${{ secrets.TEST_HUME_SECRET_KEY }}
        run: pnpm run test

  # Same shape as above for the TypeScript TTS quickstart.
  test-tts-typescript-quickstart:
    needs: detect-changes
    if: github.event_name == 'workflow_dispatch' || github.event_name == 'schedule' || needs.detect-changes.outputs.tts_ts_quickstart == 'true'
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v6

      - name: Setup pnpm
        uses: pnpm/action-setup@v6
        with:
          version: 10.20.0

      - name: Setup Node
        uses: actions/setup-node@v6
        with:
          node-version: 20

      - name: Install deps (TTS quickstart)
        working-directory: tts/tts-typescript-quickstart
        run: pnpm install --frozen-lockfile

      - name: Build
        working-directory: tts/tts-typescript-quickstart
        run: pnpm run build

      - name: Run tests in tts-typescript-quickstart
        working-directory: tts/tts-typescript-quickstart
        env:
          TEST_HUME_API_KEY: ${{ secrets.TEST_HUME_API_KEY }}
          TEST_HUME_SECRET_KEY: ${{ secrets.TEST_HUME_SECRET_KEY }}
        run: pnpm run test

  # Expression Measurement batch (TypeScript) test job; uses a dedicated API
  # key secret (TEST_HUME_API_KEY_EXP_MEASUREMENT) exposed as HUME_API_KEY.
  test-exp-meas-typescript-raw-text-processor:
    needs: detect-changes
    if: github.event_name == 'workflow_dispatch' || github.event_name == 'schedule' || needs.detect-changes.outputs.exp_meas_ts_raw_text_processor == 'true'
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v6

      - name: Setup Node
        uses: actions/setup-node@v6
        with:
          node-version: 20

      - name: Setup pnpm
        uses: pnpm/action-setup@v6
        with:
          version: 10.20.0

      - name: Install deps (expression-measurement batch typescript-raw-text-processor)
        working-directory: expression-measurement/batch/typescript-raw-text-processor
        run: pnpm install --frozen-lockfile

      - name: Run tests in typescript-raw-text-processor
        working-directory: expression-measurement/batch/typescript-raw-text-processor
        env:
          HUME_API_KEY: ${{ secrets.TEST_HUME_API_KEY_EXP_MEASUREMENT }}
        run: pnpm run test

  # Expression Measurement streaming (Python) test job, driven entirely by uv.
  test-exp-meas-python-streaming:
    needs: detect-changes
    if: github.event_name == 'workflow_dispatch' || github.event_name == 'schedule' || needs.detect-changes.outputs.exp_meas_py_streaming == 'true'
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v6

      # NOTE(review): recent uv installers place the binary under ~/.local/bin
      # rather than ~/.cargo/bin — confirm this PATH entry still matches the
      # installer's target, or pin the installer version.
      - name: Install uv
        run: |
          curl -LsSf https://astral.sh/uv/install.sh | sh
          echo "$HOME/.cargo/bin" >> $GITHUB_PATH
        shell: bash

      - name: Run tests in expression-measurement/streaming/python-streaming-example
        working-directory: expression-measurement/streaming/python-streaming-example
        env:
          HUME_API_KEY: ${{ secrets.TEST_HUME_API_KEY_EXP_MEASUREMENT }}
        run: uv sync --extra dev && uv run pytest test_main.py -v

  # Test job for the Python EVI quickstart. Environment management is done by
  # uv (`uv run` resolves pyproject.toml/uv.lock on first use), so no separate
  # dependency-install step is needed.
  test-evi-python-quickstart:
    needs: detect-changes
    if: github.event_name == 'workflow_dispatch' || github.event_name == 'schedule' || needs.detect-changes.outputs.evi_py_quickstart == 'true'
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v6

      - name: Set up python
        uses: actions/setup-python@v6
        with:
          python-version: '3.11'

      # Audio packages need the PortAudio shared library present at import time.
      - name: Install system dependencies for audio
        run: |
          sudo apt-get --yes update
          sudo apt-get --yes install libportaudio2

      - name: Install uv
        run: |
          curl -LsSf https://astral.sh/uv/install.sh | sh
          echo "$HOME/.cargo/bin" >> $GITHUB_PATH
        shell: bash

      - name: Run tests in evi-python-quickstart
        working-directory: evi/evi-python-quickstart
        run: uv run pytest test_quickstart.py -v
        env:
          TEST_HUME_API_KEY: ${{ secrets.TEST_HUME_API_KEY }}

  # Test job for the Python TTS quickstart. Environment management is done by
  # uv (`uv run` resolves pyproject.toml/uv.lock on first use), so no separate
  # dependency-install step is needed.
  test-tts-python-quickstart:
    needs: detect-changes
    if: github.event_name == 'workflow_dispatch' || github.event_name == 'schedule' || needs.detect-changes.outputs.tts_py_quickstart == 'true'
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v6

      - name: Set up python
        uses: actions/setup-python@v6
        with:
          python-version: '3.11'

      # Audio packages need the PortAudio shared library present at import time.
      - name: Install system dependencies for audio
        run: |
          sudo apt-get --yes update
          sudo apt-get --yes install libportaudio2

      - name: Install uv
        run: |
          curl -LsSf https://astral.sh/uv/install.sh | sh
          echo "$HOME/.cargo/bin" >> $GITHUB_PATH
        shell: bash

      - name: Run tests in tts-python-quickstart
        working-directory: tts/tts-python-quickstart
        run: uv run pytest test_app.py -v
        env:
          TEST_HUME_API_KEY: ${{ secrets.TEST_HUME_API_KEY }}

  # Smoke check: verifies dependencies install (Poetry) and the interpreter runs.
  # NOTE(review): unlike the quickstart test jobs, this does not run on the
  # weekly schedule — confirm that is intentional.
  evi-python-chat-history:
    needs: detect-changes
    if: github.event_name == 'workflow_dispatch' || needs.detect-changes.outputs.evi_py_chat_history == 'true'
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v6

      - name: Set up Python
        uses: actions/setup-python@v6
        with:
          python-version: '3.11'

      - name: Install Poetry
        run: |
          curl -sSL https://install.python-poetry.org | python - -y --version 1.8.5
          echo "$HOME/.local/bin" >> $GITHUB_PATH

      - name: Install dependencies
        working-directory: evi/evi-python-chat-history
        run: poetry install --no-interaction --no-root

      - name: Verify
        working-directory: evi/evi-python-chat-history
        run: poetry run python -c "print('OK')"

  # Smoke check: verifies dependencies install (uv) and the interpreter runs.
  evi-python-clm-sse:
    needs: detect-changes
    if: github.event_name == 'workflow_dispatch' || needs.detect-changes.outputs.evi_py_clm_sse == 'true'
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v6

      - name: Set up Python
        uses: actions/setup-python@v6
        with:
          python-version: '3.11'

      - name: Install uv
        run: |
          curl -LsSf https://astral.sh/uv/install.sh | sh
          echo "$HOME/.cargo/bin" >> $GITHUB_PATH
        shell: bash

      - name: Install dependencies
        working-directory: evi/evi-python-clm-sse
        run: uv sync

      - name: Verify
        working-directory: evi/evi-python-clm-sse
        run: uv run python -c "print('OK')"

  # Smoke check: verifies dependencies install (uv) and the interpreter runs.
  evi-python-clm-wss:
    needs: detect-changes
    if: github.event_name == 'workflow_dispatch' || needs.detect-changes.outputs.evi_py_clm_wss == 'true'
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v6

      - name: Set up Python
        uses: actions/setup-python@v6
        with:
          python-version: '3.11'

      - name: Install uv
        run: |
          curl -LsSf https://astral.sh/uv/install.sh | sh
          echo "$HOME/.cargo/bin" >> $GITHUB_PATH
        shell: bash

      - name: Install dependencies
        working-directory: evi/evi-python-clm-wss
        run: uv sync

      - name: Verify
        working-directory: evi/evi-python-clm-wss
        run: uv run python -c "print('OK')"

  # Smoke check: uv install + interpreter run. PortAudio is required because
  # this project's dependencies include audio bindings.
  evi-python-control-plane:
    needs: detect-changes
    if: github.event_name == 'workflow_dispatch' || needs.detect-changes.outputs.evi_py_control_plane == 'true'
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v6

      - name: Set up Python
        uses: actions/setup-python@v6
        with:
          python-version: '3.11'

      - name: Install system dependencies for audio
        run: |
          sudo apt-get --yes update
          sudo apt-get --yes install libportaudio2

      - name: Install uv
        run: |
          curl -LsSf https://astral.sh/uv/install.sh | sh
          echo "$HOME/.cargo/bin" >> $GITHUB_PATH
        shell: bash

      - name: Install dependencies
        working-directory: evi/evi-python-control-plane
        run: uv sync

      - name: Verify
        working-directory: evi/evi-python-control-plane
        run: uv run python -c "print('OK')"

  # Smoke check: uv install + interpreter run.
  evi-python-phone-calling-proxy-server:
    needs: detect-changes
    if: github.event_name == 'workflow_dispatch' || needs.detect-changes.outputs.evi_py_phone_calling == 'true'
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v6

      - name: Set up Python
        uses: actions/setup-python@v6
        with:
          python-version: '3.11'

      - name: Install uv
        run: |
          curl -LsSf https://astral.sh/uv/install.sh | sh
          echo "$HOME/.cargo/bin" >> $GITHUB_PATH
        shell: bash

      - name: Install dependencies
        working-directory: evi/evi-python-phone-calling-proxy-server
        run: uv sync

      - name: Verify
        working-directory: evi/evi-python-phone-calling-proxy-server
        run: uv run python -c "print('OK')"

  # Smoke check: Poetry install + interpreter run.
  evi-python-webhooks:
    needs: detect-changes
    if: github.event_name == 'workflow_dispatch' || needs.detect-changes.outputs.evi_py_webhooks == 'true'
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v6

      - name: Set up Python
        uses: actions/setup-python@v6
        with:
          python-version: '3.11'

      - name: Install Poetry
        run: |
          curl -sSL https://install.python-poetry.org | python - -y --version 1.8.5
          echo "$HOME/.local/bin" >> $GITHUB_PATH

      - name: Install dependencies
        working-directory: evi/evi-python-webhooks
        run: poetry install --no-interaction --no-root

      - name: Verify
        working-directory: evi/evi-python-webhooks
        run: poetry run python -c "print('OK')"

  # Smoke check: Poetry install + interpreter run.
  evi-python-wss-clm-endpoint:
    needs: detect-changes
    if: github.event_name == 'workflow_dispatch' || needs.detect-changes.outputs.evi_py_wss_clm_endpoint == 'true'
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v6

      - name: Set up Python
        uses: actions/setup-python@v6
        with:
          python-version: '3.11'

      - name: Install Poetry
        run: |
          curl -sSL https://install.python-poetry.org | python - -y --version 1.8.5
          echo "$HOME/.local/bin" >> $GITHUB_PATH

      - name: Install dependencies
        working-directory: evi/evi-python-wss-clm-endpoint
        run: poetry install --no-interaction --no-root

      - name: Verify
        working-directory: evi/evi-python-wss-clm-endpoint
        run: poetry run python -c "print('OK')"

  # Smoke check: uv install + interpreter run. Needs both PortAudio and ALSA
  # development headers for this project's audio dependencies.
  tts-python-livekit:
    needs: detect-changes
    if: github.event_name == 'workflow_dispatch' || needs.detect-changes.outputs.tts_py_livekit == 'true'
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v6

      - name: Set up Python
        uses: actions/setup-python@v6
        with:
          python-version: '3.11'

      - name: Install system dependencies for audio
        run: |
          sudo apt-get --yes update
          sudo apt-get --yes install libportaudio2 libasound2-dev

      - name: Install uv
        run: |
          curl -LsSf https://astral.sh/uv/install.sh | sh
          echo "$HOME/.cargo/bin" >> $GITHUB_PATH
        shell: bash

      - name: Install dependencies
        working-directory: tts/tts-python-livekit
        run: uv sync

      - name: Verify
        working-directory: tts/tts-python-livekit
        run: uv run python -c "print('OK')"

  # Restore, build, and run the .NET EVI quickstart test project on .NET 9.
  # Both TEST_HUME_API_KEY and HUME_API_KEY map to the same secret so tests can
  # read whichever variable name they expect.
  evi-dotnet-quickstart:
    needs: detect-changes
    if: github.event_name == 'workflow_dispatch' || github.event_name == 'schedule' || needs.detect-changes.outputs.evi_dotnet_quickstart == 'true'
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v6

      - name: Setup .NET
        uses: actions/setup-dotnet@v5
        with:
          dotnet-version: '9.0.x'

      - name: Restore
        working-directory: evi/evi-dotnet-quickstart
        run: dotnet restore evi-csharp-quickstart.tests.csproj

      - name: Build
        working-directory: evi/evi-dotnet-quickstart
        run: dotnet build evi-csharp-quickstart.tests.csproj --no-restore -c Release

      - name: Run tests
        working-directory: evi/evi-dotnet-quickstart
        env:
          TEST_HUME_API_KEY: ${{ secrets.TEST_HUME_API_KEY }}
          HUME_API_KEY: ${{ secrets.TEST_HUME_API_KEY }}
        run: dotnet test evi-csharp-quickstart.tests.csproj --no-build -c Release -v normal

  # Same shape as above for the .NET TTS quickstart.
  tts-dotnet-quickstart:
    needs: detect-changes
    if: github.event_name == 'workflow_dispatch' || github.event_name == 'schedule' || needs.detect-changes.outputs.tts_dotnet_quickstart == 'true'
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v6

      - name: Setup .NET
        uses: actions/setup-dotnet@v5
        with:
          dotnet-version: '9.0.x'

      - name: Restore
        working-directory: tts/tts-dotnet-quickstart
        run: dotnet restore tts-csharp-quickstart.tests.csproj

      - name: Build
        working-directory: tts/tts-dotnet-quickstart
        run: dotnet build tts-csharp-quickstart.tests.csproj --no-restore -c Release

      - name: Run tests
        working-directory: tts/tts-dotnet-quickstart
        env:
          TEST_HUME_API_KEY: ${{ secrets.TEST_HUME_API_KEY }}
          HUME_API_KEY: ${{ secrets.TEST_HUME_API_KEY }}
        run: dotnet test tts-csharp-quickstart.tests.csproj --no-build -c Release -v normal

  # Build + browser-based test of the Next.js app-router quickstart; installs
  # Chromium via Playwright before running the test suite.
  evi-next-js-app-router-quickstart:
    needs: detect-changes
    if: github.event_name == 'workflow_dispatch' || github.event_name == 'schedule' || needs.detect-changes.outputs.evi_app_router == 'true'
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v6

      - name: Setup pnpm
        uses: pnpm/action-setup@v6
        with:
          version: 10.20.0

      - name: Setup Node
        uses: actions/setup-node@v6
        with:
          node-version: 20
          cache: 'pnpm'
          cache-dependency-path: evi/evi-next-js-app-router-quickstart/pnpm-lock.yaml

      - name: Install dependencies
        working-directory: evi/evi-next-js-app-router-quickstart
        run: pnpm install --frozen-lockfile

      - name: Build
        working-directory: evi/evi-next-js-app-router-quickstart
        run: pnpm run build

      - name: Install Playwright browsers
        working-directory: evi/evi-next-js-app-router-quickstart
        run: pnpm exec playwright install --with-deps chromium

      - name: Run tests
        working-directory: evi/evi-next-js-app-router-quickstart
        env:
          TEST_HUME_API_KEY: ${{ secrets.TEST_HUME_API_KEY }}
          TEST_HUME_SECRET_KEY: ${{ secrets.TEST_HUME_SECRET_KEY }}
          HUME_API_KEY: ${{ secrets.TEST_HUME_API_KEY }}
          HUME_SECRET_KEY: ${{ secrets.TEST_HUME_SECRET_KEY }}
        run: pnpm run test

  # Notifies Slack when the daily (cron) test run finishes. Add repo secret SLACK_WEBHOOK_URL (Incoming Webhook URL).
  notify-slack-cron:
    needs:
      - test-evi-typescript-quickstart
      - test-tts-typescript-quickstart
      - test-exp-meas-typescript-raw-text-processor
      - test-exp-meas-python-streaming
      - evi-next-js-app-router-quickstart
      - test-evi-python-quickstart
      - test-tts-python-quickstart
      - evi-dotnet-quickstart
      - tts-dotnet-quickstart
    # always() ensures we run even when one of the test jobs failed, so we can post the result to Slack
    if: always() && (github.event_name == 'schedule' || github.event_name == 'workflow_dispatch')
    runs-on: ubuntu-latest
    steps:
      # Checkout is needed so the version-extraction commands below can read
      # the package manifests from the repo.
      - name: Checkout
        uses: actions/checkout@v6

      - name: Notify Slack
        env:
          SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}
          EVI_TS: ${{ needs.test-evi-typescript-quickstart.result }}
          TTS_TS: ${{ needs.test-tts-typescript-quickstart.result }}
          EXP_MEAS_TS_RAW_TEXT_PROCESSOR: ${{ needs.test-exp-meas-typescript-raw-text-processor.result }}
          EVI_PY: ${{ needs.test-evi-python-quickstart.result }}
          TTS_PY: ${{ needs.test-tts-python-quickstart.result }}
          EXP_MEAS_PY_STREAMING: ${{ needs.test-exp-meas-python-streaming.result }}
          EVI_DOTNET: ${{ needs.evi-dotnet-quickstart.result }}
          TTS_DOTNET: ${{ needs.tts-dotnet-quickstart.result }}
          EVI_APP: ${{ needs.evi-next-js-app-router-quickstart.result }}
        run: |
          if [ -z "$SLACK_WEBHOOK_URL" ]; then
            echo "::warning::SLACK_WEBHOOK_URL secret is not set. Add it in repo Settings → Secrets and variables → Actions to get Slack notifications."
            echo "## Slack notification skipped" >> $GITHUB_STEP_SUMMARY
            echo "Add the **SLACK_WEBHOOK_URL** repository secret (Settings → Secrets and variables → Actions) to enable Slack notifications." >> $GITHUB_STEP_SUMMARY
            exit 0
          fi
          status() { [ "$1" = "success" ] && echo "✅ $1" || echo "❌ $1"; }
          # SDK versions from repo manifests (package name + version spec)
          EVI_TS_VER=$(jq -r '.dependencies.hume' evi/evi-typescript-quickstart/package.json 2>/dev/null || echo "?")
          TTS_TS_VER=$(jq -r '.dependencies.hume' tts/tts-typescript-quickstart/package.json 2>/dev/null || echo "?")
          EXP_MEAS_TS_VER=$(jq -r '.dependencies.hume' expression-measurement/batch/typescript-raw-text-processor/package.json 2>/dev/null || echo "?")
          EVI_APP_VER=$(jq -r '.dependencies["@humeai/voice-react"]' evi/evi-next-js-app-router-quickstart/package.json 2>/dev/null || echo "?")
          EVI_PY_VER=$(python3 -c "import tomllib; d=tomllib.load(open('evi/evi-python-quickstart/pyproject.toml','rb')); deps=d.get('project',{}).get('dependencies',[]); print(next((x for x in deps if 'hume' in x), ''))" 2>/dev/null || echo "?")
          TTS_PY_VER=$(python3 -c "import tomllib; d=tomllib.load(open('tts/tts-python-quickstart/pyproject.toml','rb')); deps=d.get('project',{}).get('dependencies',[]); print(next((x for x in deps if 'hume' in x), ''))" 2>/dev/null || echo "?")
          EXP_MEAS_PY_VER=$(python3 -c "import tomllib; d=tomllib.load(open('expression-measurement/streaming/python-streaming-example/pyproject.toml','rb')); deps=d.get('project',{}).get('dependencies',[]); print(next((x for x in deps if 'hume' in x), ''))" 2>/dev/null || echo "?")
          # The Hume .NET package version is pinned once in Directory.Packages.props,
          # so a single lookup covers both .NET quickstarts.
          DOTNET_VER=$(sed -n 's/.*PackageVersion Include="Hume" Version="\([^"]*\)".*/\1/p' Directory.Packages.props 2>/dev/null | head -1 || echo "?")
          text="*Daily test-examples cron job finished*
          • TS SDK (hume $EVI_TS_VER): evi-typescript-quickstart: $(status "$EVI_TS")
          • TS SDK (hume $TTS_TS_VER): tts-typescript-quickstart: $(status "$TTS_TS")
          • TS SDK (hume $EXP_MEAS_TS_VER): exp-meas-ts-raw-text-processor (batch): $(status "$EXP_MEAS_TS_RAW_TEXT_PROCESSOR")
          • React SDK (@humeai/voice-react $EVI_APP_VER): evi-next-js-app-router-quickstart: $(status "$EVI_APP")
          • Py SDK (hume $EVI_PY_VER): evi-python-quickstart: $(status "$EVI_PY")
          • Py SDK (hume $TTS_PY_VER): tts-python-quickstart: $(status "$TTS_PY")
          • Py SDK (hume $EXP_MEAS_PY_VER): exp-meas-python-streaming: $(status "$EXP_MEAS_PY_STREAMING")
          • .NET SDK (Hume $DOTNET_VER): evi-dotnet-quickstart: $(status "$EVI_DOTNET")
          • .NET SDK (Hume $DOTNET_VER): tts-dotnet-quickstart: $(status "$TTS_DOTNET")
          <${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}|View run>"
          # Strip the YAML block indentation from each line before posting.
          payload=$(jq -n --arg text "$(echo "$text" | sed 's/^[[:space:]]*//')" '{text: $text}')
          res=$(curl -sS -w "%{http_code}" -o /tmp/slack_resp -X POST -H "Content-Type: application/json" --data "$payload" "$SLACK_WEBHOOK_URL") || true
          if [ "$res" = "200" ]; then
            echo "Slack notification sent successfully."
            echo "## Slack notification sent" >> $GITHUB_STEP_SUMMARY
          else
            echo "::warning::Slack webhook returned HTTP $res. Check the webhook URL and channel."
            echo "## Slack notification failed (HTTP $res)" >> $GITHUB_STEP_SUMMARY
          fi

  # Build-only check (no tests) for the Next.js function-calling example.
  evi-next-js-function-calling:
    needs: detect-changes
    if: github.event_name == 'workflow_dispatch' || needs.detect-changes.outputs.evi_function_calling == 'true'
    runs-on: ubuntu-latest
    env:
      HUME_API_KEY: ${{ secrets.TEST_HUME_API_KEY }}
      HUME_SECRET_KEY: ${{ secrets.TEST_HUME_SECRET_KEY }}
    steps:
      - name: Checkout
        uses: actions/checkout@v6

      - name: Setup pnpm
        uses: pnpm/action-setup@v6
        with:
          version: 10.20.0

      - name: Setup Node
        uses: actions/setup-node@v6
        with:
          node-version: 20
          cache: 'pnpm'
          cache-dependency-path: evi/evi-next-js-function-calling/pnpm-lock.yaml

      - name: Install dependencies
        working-directory: evi/evi-next-js-function-calling
        run: pnpm install --frozen-lockfile

      - name: Build
        working-directory: evi/evi-next-js-function-calling
        run: pnpm run build

  # Build-only check (no tests) for the Next.js pages-router quickstart.
  evi-next-js-pages-router-quickstart:
    needs: detect-changes
    if: github.event_name == 'workflow_dispatch' || needs.detect-changes.outputs.evi_pages_router == 'true'
    runs-on: ubuntu-latest
    env:
      HUME_API_KEY: ${{ secrets.TEST_HUME_API_KEY }}
      HUME_SECRET_KEY: ${{ secrets.TEST_HUME_SECRET_KEY }}
    steps:
      - name: Checkout
        uses: actions/checkout@v6

      - name: Setup pnpm
        uses: pnpm/action-setup@v6
        with:
          version: 10.20.0

      - name: Setup Node
        uses: actions/setup-node@v6
        with:
          node-version: 20
          cache: 'pnpm'
          cache-dependency-path: evi/evi-next-js-pages-router-quickstart/pnpm-lock.yaml

      - name: Install dependencies
        working-directory: evi/evi-next-js-pages-router-quickstart
        run: pnpm install --frozen-lockfile

      - name: Build
        working-directory: evi/evi-next-js-pages-router-quickstart
        run: pnpm run build

  # Lint-only check for the React Native example (no build or tests here).
  evi-react-native:
    needs: detect-changes
    if: github.event_name == 'workflow_dispatch' || needs.detect-changes.outputs.evi_react_native == 'true'
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v6

      - name: Setup pnpm
        uses: pnpm/action-setup@v6
        with:
          version: 10.20.0

      - name: Setup Node
        uses: actions/setup-node@v6
        with:
          node-version: 20
          cache: 'pnpm'
          cache-dependency-path: evi/evi-react-native/pnpm-lock.yaml

      - name: Install dependencies
        working-directory: evi/evi-react-native
        run: pnpm install --frozen-lockfile

      - name: Lint
        working-directory: evi/evi-react-native
        run: pnpm run lint

  # Build-only check for the TypeScript chat-history example.
  evi-typescript-chat-history:
    needs: detect-changes
    if: github.event_name == 'workflow_dispatch' || needs.detect-changes.outputs.evi_ts_chat_history == 'true'
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v6

      - name: Setup pnpm
        uses: pnpm/action-setup@v6
        with:
          version: 10.20.0

      - name: Setup Node
        uses: actions/setup-node@v6
        with:
          node-version: 20
          cache: 'pnpm'
          cache-dependency-path: evi/evi-typescript-chat-history/pnpm-lock.yaml

      - name: Install dependencies
        working-directory: evi/evi-typescript-chat-history
        run: pnpm install --frozen-lockfile

      - name: Build
        working-directory: evi/evi-typescript-chat-history
        run: pnpm run build

  # Builds the evi-typescript-function-calling example. Runs on manual dispatch
  # or when the detect-changes job reports changes for this example.
  evi-typescript-function-calling:
    needs: detect-changes
    if: github.event_name == 'workflow_dispatch' || needs.detect-changes.outputs.evi_ts_function_calling == 'true'
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v6

      - name: Setup pnpm
        uses: pnpm/action-setup@v6
        with:
          version: 10.20.0

      - name: Setup Node
        uses: actions/setup-node@v6
        with:
          node-version: 20
          # Cache the pnpm store, consistent with the other pnpm jobs in this
          # workflow (the lockfile exists: install uses --frozen-lockfile).
          cache: 'pnpm'
          cache-dependency-path: evi/evi-typescript-function-calling/pnpm-lock.yaml

      - name: Install dependencies
        working-directory: evi/evi-typescript-function-calling
        run: pnpm install --frozen-lockfile

      - name: Build
        working-directory: evi/evi-typescript-function-calling
        run: pnpm run build

  # Installs dependencies for the evi-typescript-webhooks example. Runs on
  # manual dispatch or when the detect-changes job reports changes for it.
  evi-typescript-webhooks:
    needs: detect-changes
    if: github.event_name == 'workflow_dispatch' || needs.detect-changes.outputs.evi_ts_webhooks == 'true'
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v6

      - name: Setup pnpm
        uses: pnpm/action-setup@v6
        with:
          version: 10.20.0

      - name: Setup Node
        uses: actions/setup-node@v6
        with:
          node-version: 20
          # Cache the pnpm store, consistent with the other pnpm jobs in this
          # workflow (the lockfile exists: install uses --frozen-lockfile).
          cache: 'pnpm'
          cache-dependency-path: evi/evi-typescript-webhooks/pnpm-lock.yaml

      - name: Install dependencies
        working-directory: evi/evi-typescript-webhooks
        run: pnpm install --frozen-lockfile

  # Builds the evi-vue-widget example. Runs on manual dispatch or when the
  # detect-changes job reports changes for this example.
  evi-vue-widget:
    needs: detect-changes
    if: github.event_name == 'workflow_dispatch' || needs.detect-changes.outputs.evi_vue_widget == 'true'
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v6

      - name: Setup pnpm
        uses: pnpm/action-setup@v6
        with:
          version: 10.20.0

      - name: Setup Node
        uses: actions/setup-node@v6
        with:
          node-version: 20
          # Cache the pnpm store, consistent with the other pnpm jobs in this
          # workflow (the lockfile exists: install uses --frozen-lockfile).
          cache: 'pnpm'
          cache-dependency-path: evi/evi-vue-widget/pnpm-lock.yaml

      - name: Install dependencies
        working-directory: evi/evi-vue-widget
        run: pnpm install --frozen-lockfile

      - name: Build
        working-directory: evi/evi-vue-widget
        run: pnpm run build

  # Builds the tts-next-js-agora example. Runs on manual dispatch or when the
  # detect-changes job reports changes for this example.
  tts-next-js-agora:
    needs: detect-changes
    if: github.event_name == 'workflow_dispatch' || needs.detect-changes.outputs.tts_next_agora == 'true'
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v6

      - name: Setup pnpm
        uses: pnpm/action-setup@v6
        with:
          version: 10.20.0

      - name: Setup Node
        uses: actions/setup-node@v6
        with:
          node-version: 20
          # Cache the pnpm store, keyed on this example's lockfile.
          cache: 'pnpm'
          cache-dependency-path: tts/tts-next-js-agora/pnpm-lock.yaml

      - name: Install dependencies
        working-directory: tts/tts-next-js-agora
        run: pnpm install --frozen-lockfile

      - name: Build
        working-directory: tts/tts-next-js-agora
        run: pnpm run build

  # Builds the tts-next-js-vercel-ai-sdk example. Runs on manual dispatch or
  # when the detect-changes job reports changes for this example.
  tts-next-js-vercel-ai-sdk:
    needs: detect-changes
    if: github.event_name == 'workflow_dispatch' || needs.detect-changes.outputs.tts_next_vercel_ai_sdk == 'true'
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v6

      - name: Setup pnpm
        uses: pnpm/action-setup@v6
        with:
          version: 10.20.0

      - name: Setup Node
        uses: actions/setup-node@v6
        with:
          node-version: 20
          # Cache the pnpm store, keyed on this example's lockfile.
          cache: 'pnpm'
          cache-dependency-path: tts/tts-next-js-vercel-ai-sdk/pnpm-lock.yaml

      - name: Install dependencies
        working-directory: tts/tts-next-js-vercel-ai-sdk
        run: pnpm install --frozen-lockfile

      - name: Build
        working-directory: tts/tts-next-js-vercel-ai-sdk
        run: pnpm run build

  # Builds the tts-typescript-lipsync example. Runs on manual dispatch or when
  # the detect-changes job reports changes for this example.
  tts-typescript-lipsync:
    needs: detect-changes
    if: github.event_name == 'workflow_dispatch' || needs.detect-changes.outputs.tts_ts_lipsync == 'true'
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v6

      - name: Setup pnpm
        uses: pnpm/action-setup@v6
        with:
          version: 10.20.0

      - name: Setup Node
        uses: actions/setup-node@v6
        with:
          node-version: 20
          # Cache the pnpm store, consistent with the other pnpm jobs in this
          # workflow (the lockfile exists: install uses --frozen-lockfile).
          cache: 'pnpm'
          cache-dependency-path: tts/tts-typescript-lipsync/pnpm-lock.yaml

      - name: Install dependencies
        working-directory: tts/tts-typescript-lipsync
        run: pnpm install --frozen-lockfile

      - name: Build
        working-directory: tts/tts-typescript-lipsync
        run: pnpm run build


================================================
FILE: .gitignore
================================================
.hume/
__pycache__/
.venv/
.DS_Store
.env
node_modules/
.pnpm-store/
dist/
.vscode/
.mypy_cache/

================================================
FILE: Directory.Packages.props
================================================
<Project>
  <!-- Central Package Management: package versions for every project in this
       repo are pinned here instead of in individual .csproj files. -->
  <PropertyGroup>
    <ManagePackageVersionsCentrally>true</ManagePackageVersionsCentrally>
  </PropertyGroup>
  <ItemGroup>
    <!-- Shared across evi and tts .NET quickstarts; Dependabot updates Hume here -->
    <PackageVersion Include="Hume" Version="0.2.7" />
    <PackageVersion Include="DotNetEnv" Version="3.2.0" />
    <PackageVersion Include="OneOf" Version="3.0.271" />
    <PackageVersion Include="OneOf.Extended" Version="3.0.271" />
    <PackageVersion Include="System.Text.Json" Version="10.0.7" />
    <!-- Test packages (aligned across both quickstarts) -->
    <PackageVersion Include="Microsoft.NET.Test.Sdk" Version="18.5.1" />
    <PackageVersion Include="xunit" Version="2.9.3" />
    <PackageVersion Include="xunit.runner.visualstudio" Version="3.1.5" />
    <PackageVersion Include="coverlet.collector" Version="10.0.0" />
    <PackageVersion Include="Moq" Version="4.20.72" />
  </ItemGroup>
</Project>


================================================
FILE: LICENSE
================================================
MIT License

Copyright (c) 2023 Hume AI

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.


================================================
FILE: README.md
================================================
<div align="center">
  <img src="https://storage.googleapis.com/hume-public-logos/hume/hume-banner.png">
  <h1>Hume API Examples</h1>
  <p>
    <strong>Browse sample code and projects designed to help you integrate Hume APIs</strong>
  </p>
  <p>
    <a href="https://docs.hume.ai">📘 Documentation</a> •
    <a href="https://discord.com/invite/humeai">💬 Join us on Discord</a> •
    <a href="https://dev.hume.ai/docs/introduction/api-key">🔐 Getting your API Keys</a>
  </p>
</div>

## Overview

Welcome to the official Hume API Examples repository!
Here you'll find open-source example projects and quickstart guides to help you integrate the [Hume API](https://docs.hume.ai) across a variety of languages and frameworks.

Use these examples to:

- Add empathic Text-to-Speech (TTS) to your application
- Build rich conversational agents with the Empathic Voice Interface (EVI)
- Measure expressions with facial, vocal, and language-based analysis

Whether you're using Python, TypeScript, Swift, C#, Flutter, Unity, or Next.js, there's something here to help you get started quickly.

## [Text-to-Speech (TTS)](https://dev.hume.ai/docs/text-to-speech-tts/overview)

| Name                                                                                       | Language   | Framework       |
| ------------------------------------------------------------------------------------------ | ---------- | --------------- |
| [`tts-dotnet-quickstart`](/tts/tts-dotnet-quickstart/README.md)                            | C#         | .NET            |
| [`tts-next-js-agora`](/tts/tts-next-js-agora/README.md)                                    | TypeScript | Next.js         |
| [`tts-next-js-chat`](/tts/tts-next-js-chat/README.md)                                      | TypeScript | Next.js         |
| [`tts-next-js-vercel-ai-sdk`](/tts/tts-next-js-vercel-ai-sdk/README.md)                    | TypeScript | Next.js         |
| [`tts-python-livekit`](/tts/tts-python-livekit/README.md)                                  | Python     | LiveKit         |
| [`tts-python-quickstart`](/tts/tts-python-quickstart/README.md)                            | Python     |                 |
| [`tts-swift-quickstart`](/tts/tts-swift-quickstart/README.md)                              | Swift      | iOS             |
| [`tts-typescript-lipsync`](/tts/tts-typescript-lipsync/README.md)                          | TypeScript |                 |
| [`tts-typescript-quickstart`](/tts/tts-typescript-quickstart/README.md)                    | TypeScript |                 |
| [`tts-unity-quickstart`](/tts/tts-unity-quickstart/README.md)                              | C#         | Unity           |

## [Empathic Voice Interface (EVI)](https://dev.hume.ai/docs/empathic-voice-interface-evi/overview)

| Name                                                                                       | Language   | Framework       |
| ------------------------------------------------------------------------------------------ | ---------- | --------------- |
| [`evi-dotnet-quickstart`](/evi/evi-dotnet-quickstart/README.md)                            | C#         | .NET            |
| [`evi-flutter`](/evi/evi-flutter/README.md)                                                | Dart       | Flutter         |
| [`evi-next-js-app-router-quickstart`](/evi/evi-next-js-app-router-quickstart/README.md)    | TypeScript | Next.js         |
| [`evi-next-js-function-calling`](/evi/evi-next-js-function-calling/README.md)              | TypeScript | Next.js         |
| [`evi-next-js-pages-router-quickstart`](/evi/evi-next-js-pages-router-quickstart/README.md)| TypeScript | Next.js         |
| [`evi-prompting-examples`](/evi/evi-prompting-examples/README.md)                          |            |                 |
| [`evi-python-chat-history`](/evi/evi-python-chat-history/README.md)                        | Python     |                 |
| [`evi-python-clm-sse`](/evi/evi-python-clm-sse/README.md)                                  | Python     |                 |
| [`evi-python-clm-wss`](/evi/evi-python-clm-wss/README.md)                                  | Python     |                 |
| [`evi-python-control-plane`](/evi/evi-python-control-plane/README.md)                      | Python     |                 |
| [`evi-python-function-calling`](/evi/evi-python-function-calling/README.md)                | Python     |                 |
| [`evi-python-phone-calling-proxy-server`](/evi/evi-python-phone-calling-proxy-server/README.md) | Python | Flask           |
| [`evi-python-quickstart`](/evi/evi-python-quickstart/README.md)                            | Python     |                 |
| [`evi-python-raw-api`](/evi/evi-python-raw-api/README.md)                                  | Python     |                 |
| [`evi-python-webhooks`](/evi/evi-python-webhooks/README.md)                                | Python     | FastAPI         |
| [`evi-python-wss-clm-endpoint`](/evi/evi-python-wss-clm-endpoint/)                         | Python     | Modal           |
| [`evi-react-native`](/evi/evi-react-native/README.md)                                      | TypeScript | React Native    |
| [`evi-swift-chat`](/evi/evi-swift-chat/README.md)                                          | Swift      | iOS             |
| [`evi-touchdesigner`](/evi/evi-touchdesigner/README.md)                                    | Python     | TouchDesigner   |
| [`evi-typescript-chat-history`](/evi/evi-typescript-chat-history/README.md)                | TypeScript |                 |
| [`evi-typescript-function-calling`](/evi/evi-typescript-function-calling/README.md)        | TypeScript | Vite            |
| [`evi-typescript-proxy`](/evi/evi-typescript-proxy/README.md)                              | TypeScript | Node.js         |
| [`evi-typescript-quickstart`](/evi/evi-typescript-quickstart/README.md)                    | TypeScript |                 |
| [`evi-typescript-webhooks`](/evi/evi-typescript-webhooks/README.md)                        | TypeScript | Express         |
| [`evi-unity-quickstart`](/evi/evi-unity-quickstart/README.md)                              | C#         | Unity           |
| [`evi-vue-widget`](/evi/evi-vue-widget/README.md)                                          | TypeScript | Vue             |

## [Expression Measurement API](https://dev.hume.ai/docs/expression-measurement-api/overview)

| Name                                                                                                     | Models                                | Language   | Framework   |
| -------------------------------------------------------------------------------------------------------- | ------------------------------------- | ---------- | ----------- |
| [`visualization-example`](/expression-measurement/visualization-example/example-notebook.ipynb)          | `face`                                | Python     |             |
| [`python-top-emotions`](/expression-measurement/batch/python-top-emotions/README.md)                     | `face`                                | Python     |             |
| [`typescript-raw-text-processor`](/expression-measurement/batch/typescript-raw-text-processor/README.md) | `language`                            | TypeScript |             |
| [`next-js-emotional-language`](/expression-measurement/batch/next-js-emotional-language/README.md)       | `language`                            | TypeScript | Next.js     |
| [`next-js-streaming-example`](/expression-measurement/streaming/next-js-streaming-example/README.md)     | `language`, `face`, `burst`, `speech` | TypeScript | Next.js     |

## Authentication & Setup

You must authenticate to use the Hume API. Your API key can be retrieved from the [Hume AI platform](https://app.hume.ai/keys). For detailed instructions, see our documentation on [getting your API keys](https://dev.hume.ai/docs/introduction/api-key).

Each example project includes a `README.md` file with step-by-step instructions on:

- Setting your API key (usually via environment variables)
- Installing dependencies
- Running the example

## License

All projects are licensed under the MIT License - see the [LICENSE](/LICENSE) file for details.


================================================
FILE: evi/evi-dotnet-quickstart/.gitignore
================================================
# Build outputs
[Bb]in/
[Oo]bj/

# IDE
.vs/
.idea/
*.user
*.suo

# macOS
.DS_Store

# Environment
.env

# Allow sample audio
!sample_input.pcm


================================================
FILE: evi/evi-dotnet-quickstart/EviTests.cs
================================================
// To run tests:
// dotnet test evi-csharp-quickstart.tests.csproj --logger "console;verbosity=detailed"

using System;
using System.Collections.Generic;
using System.Linq;
using System.Text.Json;
using System.Threading.Tasks;
using DotNetEnv;
using Hume;
using Hume.EmpathicVoice;
using OneOf;
using Xunit;
using Xunit.Abstractions;

namespace EviCsharpQuickstart.Tests;

/// <summary>
/// Shared xUnit fixture that loads the Hume API key from the environment (or a
/// local .env file) and constructs a <see cref="HumeClient"/> for the tests.
/// TEST_HUME_API_KEY (used in CI) takes precedence over HUME_API_KEY.
/// </summary>
public class EviTestFixture : IAsyncLifetime
{
    // API key resolved during InitializeAsync; empty until then.
    public string ApiKey { get; private set; } = string.Empty;

    // Client built from ApiKey; null until InitializeAsync runs.
    public HumeClient? HumeClient { get; private set; }

    public Task InitializeAsync()
    {
        // Tests run from bin/Debug/net9.0/, so .env is 3 levels up
        Env.Load("../../../.env");

        var key = Environment.GetEnvironmentVariable("TEST_HUME_API_KEY");
        if (string.IsNullOrEmpty(key))
        {
            key = Environment.GetEnvironmentVariable("HUME_API_KEY");
        }

        if (string.IsNullOrEmpty(key))
        {
            throw new InvalidOperationException(
                "API key is required. Set TEST_HUME_API_KEY (CI) or HUME_API_KEY.");
        }

        ApiKey = key;
        HumeClient = new HumeClient(key);

        return Task.CompletedTask;
    }

    // Nothing to tear down; the fixture owns no unmanaged resources.
    public Task DisposeAsync() => Task.CompletedTask;
}

/// <summary>
/// Integration tests that open a live EVI websocket chat and verify that chat
/// metadata arrives and that session settings are recorded as chat events.
/// </summary>
[Collection("EviTests")]
public class EviConnectionTests : IClassFixture<EviTestFixture>
{
    private readonly EviTestFixture _fixture;
    private readonly ITestOutputHelper _output;

    public EviConnectionTests(EviTestFixture fixture, ITestOutputHelper output)
    {
        _fixture = fixture;
        _output = output;
    }

    /// <summary>
    /// Polls up to 100 times at 100 ms intervals (~10 s total) until the
    /// accessor returns a non-null chat id captured from chat_metadata.
    /// </summary>
    private static async Task WaitForChatIdAsync(Func<string?> getChatId)
    {
        for (int i = 0; i < 100; i++)
        {
            if (getChatId() != null)
            {
                break;
            }
            await Task.Delay(100);
        }
    }

    /// <summary>
    /// Fetches all recorded events for a chat in ascending (oldest-first) order.
    /// </summary>
    private async Task<List<ReturnChatEvent>> FetchChatEventsAsync(string chatId)
    {
        var events = new List<ReturnChatEvent>();
        var request = new ChatsListChatEventsRequest
        {
            PageNumber = 0,
            AscendingOrder = true
        };
        var pager = await _fixture.HumeClient!.EmpathicVoice.Chats.ListChatEventsAsync(chatId, request);

        await foreach (var evt in pager)
        {
            events.Add(evt);
        }

        return events;
    }

    [Fact(DisplayName = "test fixture has API key")]
    public void TestFixture_HasApiKey()
    {
        Assert.False(string.IsNullOrEmpty(_fixture.ApiKey), "API key loaded");
        Assert.NotNull(_fixture.HumeClient);
    }

    [Fact(DisplayName = "connects w/ API key, starts a chat, receives a chatId, stays alive for 2 seconds")]
    public async Task Connects_StartsChat_ReceivesChatId_StaysAlive()
    {
        string? chatId = null;

        var chatApi = _fixture.HumeClient!.EmpathicVoice.CreateChatApi(new ChatApi.Options
        {
            ApiKey = _fixture.ApiKey,
            SessionSettings = new ConnectSessionSettings(),
        });

        try
        {
            chatApi.ChatMetadata.Subscribe(metadata =>
            {
                chatId = metadata.ChatId;
            });

            await chatApi.ConnectAsync();
            await WaitForChatIdAsync(() => chatId);

            Assert.NotNull(chatId);
            Assert.False(string.IsNullOrEmpty(chatId), "Expected chat_id from chat_metadata");

            await Task.Delay(2000);
        }
        finally
        {
            // Always tear down the connection, even when an assertion fails.
            await chatApi.DisposeAsync();
        }
    }

    [Fact(DisplayName = "connects w/ API key, verifies sessionSettings are passed on connect()")]
    public async Task Connects_VerifiesSessionSettingsOnConnect()
    {
        var sessionSettings = new ConnectSessionSettings
        {
            SystemPrompt = "You are a helpful assistant that verifies sessionSettings are passed on connect()",
            Variables = new Dictionary<string, OneOf<string, double, bool>>
            {
                { "userName", OneOf<string, double, bool>.FromT0("John") },
                { "userAge", OneOf<string, double, bool>.FromT1(30.0) },
                { "isPremium", OneOf<string, double, bool>.FromT2(true) }
            }
        };

        string? chatId = null;

        var chatApi = _fixture.HumeClient!.EmpathicVoice.CreateChatApi(new ChatApi.Options
        {
            ApiKey = _fixture.ApiKey,
            SessionSettings = sessionSettings,
        });

        try
        {
            chatApi.ChatMetadata.Subscribe(metadata =>
            {
                chatId = metadata.ChatId;
            });

            await chatApi.ConnectAsync();
            await WaitForChatIdAsync(() => chatId);

            Assert.NotNull(chatId);
            Assert.False(string.IsNullOrEmpty(chatId), "Expected chat_id from chat_metadata");
        }
        finally
        {
            // Always tear down the connection, even when an assertion fails.
            await chatApi.DisposeAsync();
        }

        // Give the backend time to persist chat events before fetching them.
        await Task.Delay(2000);

        var events = await FetchChatEventsAsync(chatId!);
        var eventTypes = events.Select(e => e.Type.ToString()).ToList();

        var sessionSettingsEvent = events.FirstOrDefault(e => e.Type.ToString() == "SESSION_SETTINGS");

        if (sessionSettingsEvent == null)
        {
            var eventTypesStr = string.Join(", ", eventTypes);
            Assert.Fail(
                $"Expected SESSION_SETTINGS event but found none. Event types found: {eventTypesStr}. Total events: {events.Count}");
            return;
        }

        Assert.NotNull(sessionSettingsEvent.MessageText);

        var parsedSettings = JsonSerializer.Deserialize<JsonElement>(sessionSettingsEvent.MessageText!);

        Assert.Equal("session_settings", parsedSettings.GetProperty("type").GetString());

        Assert.Equal("You are a helpful assistant that verifies sessionSettings are passed on connect()", parsedSettings.GetProperty("system_prompt").GetString());

        // NOTE(review): these assertions show variables come back as strings
        // regardless of the OneOf type that was sent.
        var variables = parsedSettings.GetProperty("variables");
        Assert.Equal("John", variables.GetProperty("userName").GetString());
        Assert.Equal("30", variables.GetProperty("userAge").GetString());
        Assert.Equal("true", variables.GetProperty("isPremium").GetString());
    }

    [Fact(DisplayName = "connects w/ API key, verifies sessionSettings can be updated after connect()")]
    public async Task Connects_VerifiesSessionSettingsUpdatedAfterConnect()
    {
        string? chatId = null;

        var chatApi = _fixture.HumeClient!.EmpathicVoice.CreateChatApi(new ChatApi.Options
        {
            ApiKey = _fixture.ApiKey,
            SessionSettings = new ConnectSessionSettings(),
        });

        try
        {
            chatApi.ChatMetadata.Subscribe(metadata =>
            {
                chatId = metadata.ChatId;
            });

            await chatApi.ConnectAsync();
            await WaitForChatIdAsync(() => chatId);

            Assert.NotNull(chatId);
            Assert.False(string.IsNullOrEmpty(chatId), "Expected chat_id from chat_metadata");

            var updatedSettings = new SessionSettings
            {
                SystemPrompt = "You are a helpful test assistant with updated system prompt"
            };
            await chatApi.Send(updatedSettings);

            // Let the update reach the server before closing the connection.
            await Task.Delay(1000);
        }
        finally
        {
            // Always tear down the connection, even when an assertion fails.
            await chatApi.DisposeAsync();
        }

        await Task.Delay(1000);

        var events = await FetchChatEventsAsync(chatId!);

        // Compare via ToString(), consistent with the other tests (the original
        // used a (string) cast here only).
        var sessionSettingsEvents = events.Where(e => e.Type.ToString() == "SESSION_SETTINGS").ToList();

        Assert.True(sessionSettingsEvents.Count >= 1,
            $"Expected at least 1 SESSION_SETTINGS event. Found event types: {string.Join(", ", events.Select(e => e.Type))}");

        var updatedEvent = sessionSettingsEvents.Last();

        Assert.NotNull(updatedEvent.MessageText);

        var parsedSettings = JsonSerializer.Deserialize<JsonElement>(updatedEvent.MessageText!);
        Assert.Equal("session_settings", parsedSettings.GetProperty("type").GetString());
        Assert.Equal("You are a helpful test assistant with updated system prompt",
            parsedSettings.GetProperty("system_prompt").GetString());
    }
}

/// <summary>
/// xUnit collection definition named "EviTests"; classes marked with
/// [Collection("EviTests")] share one <see cref="EviTestFixture"/> instance.
/// </summary>
[CollectionDefinition("EviTests")]
public class EviTestCollection : ICollectionFixture<EviTestFixture>
{
}


================================================
FILE: evi/evi-dotnet-quickstart/Program.cs
================================================
using System;
using System.IO;
using System.Linq;
using System.Threading.Tasks;
using DotNetEnv;
using Hume;
using Hume.EmpathicVoice;

// Load HUME_API_KEY from a local .env file (if present) into the environment.
Env.Load();

var apiKey = Environment.GetEnvironmentVariable("HUME_API_KEY")
    ?? throw new InvalidOperationException("HUME_API_KEY environment variable is required. See README.md for setup instructions.");
var client = new HumeClient(apiKey);

// Create a signal to wait for Chat Metadata
var chatMetadataReceived = new TaskCompletionSource<bool>();

// Create the ChatApi instance
var chatApi = client.EmpathicVoice.CreateChatApi(new ChatApi.Options
{
    ApiKey = apiKey,
    SessionSettings = new ConnectSessionSettings(),
});

// Subscribe to events (all handlers just log what arrives)
chatApi.AssistantMessage.Subscribe(message =>
{
    Console.WriteLine($"Assistant: {message.Message?.Content}");
});

chatApi.UserMessage.Subscribe(message =>
{
    Console.WriteLine($"User: {message.Message?.Content}");
});

chatApi.AudioOutput.Subscribe(audio =>
{
    // audio.Data is the encoded payload; only its size is logged here.
    Console.WriteLine($"Received audio chunk: {audio.Data?.Length ?? 0} bytes");
});

chatApi.ChatMetadata.Subscribe(metadata =>
{
    Console.WriteLine($"Chat Metadata - Chat ID: {metadata.ChatId}");
    chatMetadataReceived.TrySetResult(true);
});

// Connect to EVI
Console.WriteLine("Connecting to EVI...");
await chatApi.ConnectAsync();
Console.WriteLine("Connected!");

// Wait for Chat Metadata before sending anything else
Console.WriteLine("Waiting for Chat Metadata...");
await chatMetadataReceived.Task;
Console.WriteLine("Chat Metadata received.");

// Configure audio format (48kHz, 16-bit, mono PCM)
const int sampleRate = 48000;
const int channels = 1;

var sessionSettings = new SessionSettings
{
    Audio = new AudioConfiguration
    {
        SampleRate = sampleRate,
        Channels = channels
    }
};

Console.WriteLine("Sending session settings:");
Console.WriteLine($"  Encoding: {sessionSettings.Audio?.Encoding}");
Console.WriteLine($"  Sample Rate: {sessionSettings.Audio?.SampleRate} Hz");
Console.WriteLine($"  Channels: {sessionSettings.Audio?.Channels}");

await chatApi.Send(sessionSettings);
Console.WriteLine("Session settings sent successfully.");

// Stream the bundled sample PCM file to EVI in real-time chunks.
Console.WriteLine("Starting audio transmission...");
await TransmitTestAudio(chatApi, "sample_input.pcm", sampleRate, channels);

// Wait for responses (fixed 5 s window before shutting down)
Console.WriteLine("Waiting for responses...");
await Task.Delay(5000);

await chatApi.DisposeAsync();
Console.WriteLine("Done");

/// <summary>
/// Reads a PCM file and streams its audio data to EVI in real-time chunks.
/// </summary>
/// <param name="chatApi">Connected EVI chat session to stream into.</param>
/// <param name="filePath">Path to a raw 16-bit PCM file.</param>
/// <param name="sampleRate">Sample rate of the PCM data, in Hz.</param>
/// <param name="channels">Channel count of the PCM data.</param>
static async Task TransmitTestAudio(IChatApi chatApi, string filePath, int sampleRate, int channels)
{
    // Each chunk carries 10 ms of 16-bit (2 bytes/sample) audio.
    const int chunkMs = 10;
    const int sampleBytes = 2;
    int chunkBytes = sampleRate * sampleBytes * channels * chunkMs / 1000;

    // Read the whole PCM file into memory.
    byte[] pcm = File.ReadAllBytes(filePath);
    Console.WriteLine($"Read {pcm.Length} bytes of audio from {filePath}");

    // Split it into fixed-size chunks, then pace them out in real time.
    byte[][] pieces = SplitAudioIntoChunks(pcm, chunkBytes);
    await SendAudioChunksAsync(chatApi, pieces, chunkMs);
}

/// <summary>
/// Splits raw audio bytes into fixed-size chunks; the final chunk is
/// zero-padded so every chunk is exactly <paramref name="bytesPerChunk"/> bytes.
/// </summary>
/// <param name="audioData">Raw PCM bytes to split; may be empty.</param>
/// <param name="bytesPerChunk">Chunk size in bytes; must be positive.</param>
/// <returns>Array of equally-sized chunks covering all of the input.</returns>
/// <exception cref="ArgumentOutOfRangeException">
/// Thrown when <paramref name="bytesPerChunk"/> is not positive (the loop
/// would otherwise never terminate).
/// </exception>
static byte[][] SplitAudioIntoChunks(byte[] audioData, int bytesPerChunk)
{
    if (bytesPerChunk <= 0)
    {
        throw new ArgumentOutOfRangeException(nameof(bytesPerChunk), "Chunk size must be positive.");
    }

    var chunks = new List<byte[]>();

    for (int offset = 0; offset < audioData.Length; offset += bytesPerChunk)
    {
        // Allocate a full-size buffer up front: bytes past the end of the
        // input stay zero, which pads the final chunk without a second copy.
        // (The original used LINQ Skip/Take, which rescans the array from the
        // start for every chunk — O(n^2) over the whole file.)
        var chunk = new byte[bytesPerChunk];
        int count = Math.Min(bytesPerChunk, audioData.Length - offset);
        Array.Copy(audioData, offset, chunk, 0, count);

        chunks.Add(chunk);
    }

    Console.WriteLine($"Split audio into {chunks.Count} chunks");
    return chunks.ToArray();
}

/// <summary>
/// Base64-encodes each audio chunk and sends it over the chat connection,
/// pausing one chunk-duration between sends to approximate real-time
/// streaming, and logging progress at most once every five seconds.
/// </summary>
static async Task SendAudioChunksAsync(IChatApi chatApi, byte[][] chunks, int chunkDurationMs)
{
    Console.WriteLine($"Sending {chunks.Length} audio chunks...");

    long totalBytes = 0;
    var previousLog = DateTime.Now;
    int sentCount = 0;

    foreach (var chunk in chunks)
    {
        var encoded = Convert.ToBase64String(chunk);
        await chatApi.Send(new AudioInput { Data = encoded });

        totalBytes += chunk.Length;
        sentCount++;

        // Emit a progress line no more than once every five seconds.
        var timestamp = DateTime.Now;
        if ((timestamp - previousLog).TotalSeconds >= 5)
        {
            Console.WriteLine($"Sent {totalBytes} bytes ({sentCount}/{chunks.Length} chunks)");
            previousLog = timestamp;
        }

        // Pace transmission: wait one chunk's worth of playback time.
        await Task.Delay(chunkDurationMs);
    }

    Console.WriteLine("Finished sending audio.");
    Console.WriteLine($"Total bytes sent: {totalBytes}");
}


================================================
FILE: evi/evi-dotnet-quickstart/README.md
================================================
<div align="center">
  <img src="https://storage.googleapis.com/hume-public-logos/hume/hume-banner.png">
  <h1>EVI | C# Quickstart</h1>
  <p>
    <strong>Jumpstart your development with Hume's Empathic Voice Interface!</strong>
  </p>
</div>

## Overview

This project demonstrates how to use [Hume AI](https://hume.ai)'s [Empathic Voice Interface (EVI)](https://dev.hume.ai/docs/empathic-voice-interface-evi/overview) with C#.

EVI is an emotionally intelligent voice AI that understands and responds to human emotions in real-time. It processes speech with emotional awareness, enabling more natural and empathetic conversations.

## Instructions

1. Clone this examples repository:

    ```shell
    git clone https://github.com/humeai/hume-api-examples
    cd hume-api-examples/evi/evi-dotnet-quickstart
    ```

2. Set up your API key:

    Your API key can be retrieved from the [Hume AI platform](https://app.hume.ai/keys). For detailed instructions, see our documentation on [getting your api keys](https://dev.hume.ai/docs/introduction/api-key).

    Create a `.env` file in this folder with your API key:

    ```
    HUME_API_KEY=your_api_key_here
    ```

    Or set it as an environment variable:

    **Windows (Command Prompt):**
    ```cmd
    set HUME_API_KEY=your_api_key_here
    ```

    **Windows (PowerShell):**
    ```powershell
    $env:HUME_API_KEY="your_api_key_here"
    ```

    **macOS/Linux:**
    ```bash
    export HUME_API_KEY=your_api_key_here
    ```

3. Install dependencies:

    ```shell
    dotnet restore
    ```

4. Run the project:

    ```shell
    dotnet run
    ```

## Features Demonstrated

This quickstart demonstrates key features of the EVI API:

- **WebSocket Connection**: Establishing a real-time connection to EVI
- **Audio Streaming**: Sending audio data in chunks for processing
- **Event Handling**: Subscribing to assistant messages, user transcriptions, and audio output
- **Session Management**: Configuring audio settings and managing chat metadata

## Requirements

- .NET 9.0 or later (the project targets `net9.0`)
- A Hume API key

## Output

The application connects to EVI, streams your audio file, and displays:
- Transcribed user speech
- Assistant responses
- Audio output notifications


================================================
FILE: evi/evi-dotnet-quickstart/evi-csharp-quickstart.csproj
================================================
<Project Sdk="Microsoft.NET.Sdk">

  <PropertyGroup>
    <OutputType>Exe</OutputType>
    <TargetFramework>net9.0</TargetFramework>
    <RootNamespace>EviCsharpQuickstart</RootNamespace>
    <ImplicitUsings>enable</ImplicitUsings>
    <Nullable>enable</Nullable>
    <LangVersion>latest</LangVersion>
  </PropertyGroup>

  <ItemGroup>
    <PackageReference Include="Hume" />
    <PackageReference Include="DotNetEnv" />
  </ItemGroup>

</Project>


================================================
FILE: evi/evi-dotnet-quickstart/evi-csharp-quickstart.tests.csproj
================================================
<Project Sdk="Microsoft.NET.Sdk">

  <PropertyGroup>
    <TargetFramework>net9.0</TargetFramework>
    <RootNamespace>EviCsharpQuickstart.Tests</RootNamespace>
    <ImplicitUsings>enable</ImplicitUsings>
    <Nullable>enable</Nullable>
    <LangVersion>latest</LangVersion>
    <IsPackable>false</IsPackable>
    <IsTestProject>true</IsTestProject>
  </PropertyGroup>

  <ItemGroup>
    <PackageReference Include="Microsoft.NET.Test.Sdk" />
    <PackageReference Include="xunit" />
    <PackageReference Include="xunit.runner.visualstudio">
      <IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
      <PrivateAssets>all</PrivateAssets>
    </PackageReference>
    <PackageReference Include="coverlet.collector">
      <IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
      <PrivateAssets>all</PrivateAssets>
    </PackageReference>
    <PackageReference Include="Moq" />
    <PackageReference Include="Hume" />
    <PackageReference Include="DotNetEnv" />
  </ItemGroup>

</Project>


================================================
FILE: evi/evi-flutter/.gitignore
================================================
# Miscellaneous
*.class
*.log
*.pyc
*.swp
.DS_Store
.atom/
.build/
.buildlog/
.history
.svn/
.swiftpm/
migrate_working_dir/

# Environment variables related
.env

# IntelliJ related
*.iml
*.ipr
*.iws
.idea/

# The .vscode folder contains launch configuration and tasks you configure in
# VS Code which you may wish to be included in version control, so this line
# is commented out by default.
#.vscode/

# Flutter/Dart/Pub related
**/doc/api/
**/ios/Flutter/.last_build_id
.dart_tool/
.flutter-plugins
.flutter-plugins-dependencies
.pub-cache/
.pub/
/build/

# Symbolication related
app.*.symbols

# Obfuscation related
app.*.map.json

# Android Studio will place build artifacts here
/android/app/debug
/android/app/profile
/android/app/release

/pubspec.lock


ios/Podfile.lock


================================================
FILE: evi/evi-flutter/README.md
================================================
<div align="center">
  <img src="https://storage.googleapis.com/hume-public-logos/hume/hume-banner.png">
  <h1>Empathic Voice Interface | Sample Flutter App</h1>
</div>

This project features a sample implementation of Hume's [Empathic Voice Interface](https://dev.hume.ai/docs/empathic-voice-interface-evi/overview) using Flutter. This is lightly adapted from the starter project provided by `flutter create`.

**Targets:** The example supports iOS, Android, and Web.

**Dependencies:** It uses the [record](https://pub.dev/packages/record) Flutter package for audio recording, and [audioplayers](https://pub.dev/packages/audioplayers) package for playback.

## Instructions

1. Clone this examples repository:

   ```shell
   git clone https://github.com/humeai/hume-api-examples
   cd hume-api-examples/evi/evi-flutter
   ```

2. Install Flutter (if needed) following the [official guide](https://docs.flutter.dev/get-started/install).

3. Install dependencies:

   ```shell
   flutter pub get
   ```

4. Set up your API key:

   You must authenticate to use the EVI API. Your API key can be retrieved from the [Hume AI platform](https://app.hume.ai/keys). For detailed instructions, see our documentation on [getting your api keys](https://dev.hume.ai/docs/introduction/api-key).

   This example uses [flutter_dotenv](https://pub.dev/packages/flutter_dotenv). Place your API key in a `.env` file at the root of your project.

   ```shell
   echo "HUME_API_KEY=your_api_key_here" > .env
   ```

   You can copy the `.env.example` file to use as a template.

   **Note:** the `HUME_API_KEY` environment variable is for development only. In a production flutter app you should avoid building your api key into the app -- the client should fetch an access token from an endpoint on your server. You should supply the `MY_SERVER_AUTH_URL` environment variable and uncomment the call to `fetchAccessToken` in `lib/main.dart`.

5. Specify an EVI configuration (Optional):

   EVI is pre-configured with a set of default values, which are automatically applied if you do not specify a configuration. The default configuration includes a preset voice and language model, but does not include a system prompt or tools. To customize these options, you will need to create and specify your own EVI configuration. To learn more, see our [configuration guide](https://dev.hume.ai/docs/empathic-voice-interface-evi/configuration/build-a-configuration).

   ```shell
   echo "HUME_CONFIG_ID=your_config_id_here" >> .env
   ```

6. Run the app:

   ```shell
   flutter run
   ```

7. If you are using the Android emulator, make sure to send audio to the emulator from the host.

![](host-audio-screenshot.png)

## Notes

- **Echo cancellation**. Echo cancellation is important for a good user experience using EVI. Without echo cancellation, EVI will detect its own speech as user interruptions, and will cut itself off and become incoherent. This flutter example _requests_ echo cancellation from the browser or the device's operating system, but echo cancellation is hardware-dependent and may not be provided in all environments.
  - Echo cancellation works consistently on physical iOS devices and on the web.
  - Echo cancellation works on some physical Android devices.
  - Echo cancellation doesn't seem to work using the iOS simulator or Android Emulator when forwarding audio from the host.
  - If you need to test using a simulator or emulator, or in an environment where echo cancellation is not provided, use headphones, or enable the mute button while EVI is speaking.


================================================
FILE: evi/evi-flutter/analysis_options.yaml
================================================
# This file configures the analyzer, which statically analyzes Dart code to
# check for errors, warnings, and lints.
#
# The issues identified by the analyzer are surfaced in the UI of Dart-enabled
# IDEs (https://dart.dev/tools#ides-and-editors). The analyzer can also be
# invoked from the command line by running `flutter analyze`.

# The following line activates a set of recommended lints for Flutter apps,
# packages, and plugins designed to encourage good coding practices.
include: package:flutter_lints/flutter.yaml

linter:
  # The lint rules applied to this project can be customized in the
  # section below to disable rules from the `package:flutter_lints/flutter.yaml`
  # included above or to enable additional rules. A list of all available lints
  # and their documentation is published at https://dart.dev/lints.
  #
  # Instead of disabling a lint rule for the entire project in the
  # section below, it can also be suppressed for a single line of code
  # or a specific dart file by using the `// ignore: name_of_lint` and
  # `// ignore_for_file: name_of_lint` syntax on the line or in the file
  # producing the lint.
  rules:
    # avoid_print: false  # Uncomment to disable the `avoid_print` rule
    # prefer_single_quotes: true  # Uncomment to enable the `prefer_single_quotes` rule

# Additional information about this file can be found at
# https://dart.dev/guides/language/analysis-options


================================================
FILE: evi/evi-flutter/android/.gitignore
================================================
gradle-wrapper.jar
/.gradle
/captures/
/gradlew
/gradlew.bat
/local.properties
GeneratedPluginRegistrant.java

# Remember to never publicly share your keystore.
# See https://flutter.dev/to/reference-keystore
key.properties
**/*.keystore
**/*.jks


================================================
FILE: evi/evi-flutter/android/app/build.gradle
================================================
plugins {
    id "com.android.application"
    id "kotlin-android"
    // The Flutter Gradle Plugin must be applied after the Android and Kotlin Gradle plugins.
    id "dev.flutter.flutter-gradle-plugin"
}

android {
    namespace = "com.example.evi_example"
    // SDK/NDK versions are supplied by the Flutter Gradle plugin.
    compileSdk = flutter.compileSdkVersion
    ndkVersion = flutter.ndkVersion

    compileOptions {
        sourceCompatibility = JavaVersion.VERSION_1_8
        targetCompatibility = JavaVersion.VERSION_1_8
    }

    kotlinOptions {
        jvmTarget = JavaVersion.VERSION_1_8
    }

    defaultConfig {
        // TODO: Specify your own unique Application ID (https://developer.android.com/studio/build/application-id.html).
        applicationId = "com.example.evi_example"
        // You can update the following values to match your application needs.
        // For more information, see: https://flutter.dev/to/review-gradle-config.
        // NOTE(review): minSdk is pinned to 23 rather than flutter.minSdkVersion —
        // presumably required by an audio plugin; confirm before lowering.
        minSdk = 23
        targetSdk = flutter.targetSdkVersion
        versionCode = flutter.versionCode
        versionName = flutter.versionName
    }

    buildTypes {
        release {
            // TODO: Add your own signing config for the release build.
            // Signing with the debug keys for now, so `flutter run --release` works.
            signingConfig = signingConfigs.debug
        }
    }
}

// Points the Flutter Gradle plugin at the Flutter project root (two levels up).
flutter {
    source = "../.."
}


================================================
FILE: evi/evi-flutter/android/app/src/debug/AndroidManifest.xml
================================================
<manifest xmlns:android="http://schemas.android.com/apk/res/android">
    <!-- The INTERNET permission is required for development. Specifically,
         the Flutter tool needs it to communicate with the running application
         to allow setting breakpoints, to provide hot reload, etc.
    -->
    <uses-permission android:name="android.permission.INTERNET"/>
</manifest>


================================================
FILE: evi/evi-flutter/android/app/src/main/AndroidManifest.xml
================================================
<manifest xmlns:android="http://schemas.android.com/apk/res/android">
    <application
        android:label="evi_example"
        android:name="${applicationName}"
        android:icon="@mipmap/ic_launcher">
        <activity
            android:name=".MainActivity"
            android:exported="true"
            android:launchMode="singleTop"
            android:taskAffinity=""
            android:theme="@style/LaunchTheme"
            android:configChanges="orientation|keyboardHidden|keyboard|screenSize|smallestScreenSize|locale|layoutDirection|fontScale|screenLayout|density|uiMode"
            android:hardwareAccelerated="true"
            android:windowSoftInputMode="adjustResize">
            <!-- Specifies an Android theme to apply to this Activity as soon as
                 the Android process has started. This theme is visible to the user
                 while the Flutter UI initializes. After that, this theme continues
                 to determine the Window background behind the Flutter UI. -->
            <meta-data
              android:name="io.flutter.embedding.android.NormalTheme"
              android:resource="@style/NormalTheme"
              />
            <intent-filter>
                <action android:name="android.intent.action.MAIN"/>
                <category android:name="android.intent.category.LAUNCHER"/>
            </intent-filter>
        </activity>
        <!-- Don't delete the meta-data below.
             This is used by the Flutter tool to generate GeneratedPluginRegistrant.java -->
        <meta-data
            android:name="flutterEmbedding"
            android:value="2" />
    </application>
    <!-- Required to query activities that can process text, see:
         https://developer.android.com/training/package-visibility and
         https://developer.android.com/reference/android/content/Intent#ACTION_PROCESS_TEXT.

         In particular, this is used by the Flutter engine in io.flutter.plugin.text.ProcessTextPlugin. -->
    <queries>
        <intent>
            <action android:name="android.intent.action.PROCESS_TEXT"/>
            <data android:mimeType="text/plain"/>
        </intent>
    </queries>
</manifest>


================================================
FILE: evi/evi-flutter/android/app/src/main/kotlin/com/example/evi_example/MainActivity.kt
================================================
package com.example.evi_example

import io.flutter.embedding.android.FlutterActivity

// Entry-point activity; all UI and behavior is provided by the Flutter engine.
class MainActivity: FlutterActivity()


================================================
FILE: evi/evi-flutter/android/app/src/main/res/drawable/launch_background.xml
================================================
<?xml version="1.0" encoding="utf-8"?>
<!-- Modify this file to customize your launch splash screen -->
<layer-list xmlns:android="http://schemas.android.com/apk/res/android">
    <item android:drawable="@android:color/white" />

    <!-- You can insert your own image assets here -->
    <!-- <item>
        <bitmap
            android:gravity="center"
            android:src="@mipmap/launch_image" />
    </item> -->
</layer-list>


================================================
FILE: evi/evi-flutter/android/app/src/main/res/drawable-v21/launch_background.xml
================================================
<?xml version="1.0" encoding="utf-8"?>
<!-- Modify this file to customize your launch splash screen -->
<layer-list xmlns:android="http://schemas.android.com/apk/res/android">
    <item android:drawable="?android:colorBackground" />

    <!-- You can insert your own image assets here -->
    <!-- <item>
        <bitmap
            android:gravity="center"
            android:src="@mipmap/launch_image" />
    </item> -->
</layer-list>


================================================
FILE: evi/evi-flutter/android/app/src/main/res/values/styles.xml
================================================
<?xml version="1.0" encoding="utf-8"?>
<resources>
    <!-- Theme applied to the Android Window while the process is starting when the OS's Dark Mode setting is off -->
    <style name="LaunchTheme" parent="@android:style/Theme.Light.NoTitleBar">
        <!-- Show a splash screen on the activity. Automatically removed when
             the Flutter engine draws its first frame -->
        <item name="android:windowBackground">@drawable/launch_background</item>
    </style>
    <!-- Theme applied to the Android Window as soon as the process has started.
         This theme determines the color of the Android Window while your
         Flutter UI initializes, as well as behind your Flutter UI while it's
         running.

         This Theme is only used starting with V2 of Flutter's Android embedding. -->
    <style name="NormalTheme" parent="@android:style/Theme.Light.NoTitleBar">
        <item name="android:windowBackground">?android:colorBackground</item>
    </style>
</resources>


================================================
FILE: evi/evi-flutter/android/app/src/main/res/values-night/styles.xml
================================================
<?xml version="1.0" encoding="utf-8"?>
<resources>
    <!-- Theme applied to the Android Window while the process is starting when the OS's Dark Mode setting is on -->
    <style name="LaunchTheme" parent="@android:style/Theme.Black.NoTitleBar">
        <!-- Show a splash screen on the activity. Automatically removed when
             the Flutter engine draws its first frame -->
        <item name="android:windowBackground">@drawable/launch_background</item>
    </style>
    <!-- Theme applied to the Android Window as soon as the process has started.
         This theme determines the color of the Android Window while your
         Flutter UI initializes, as well as behind your Flutter UI while it's
         running.

         This Theme is only used starting with V2 of Flutter's Android embedding. -->
    <style name="NormalTheme" parent="@android:style/Theme.Black.NoTitleBar">
        <item name="android:windowBackground">?android:colorBackground</item>
    </style>
</resources>


================================================
FILE: evi/evi-flutter/android/app/src/profile/AndroidManifest.xml
================================================
<manifest xmlns:android="http://schemas.android.com/apk/res/android">
    <!-- The INTERNET permission is required for development. Specifically,
         the Flutter tool needs it to communicate with the running application
         to allow setting breakpoints, to provide hot reload, etc.
    -->
    <uses-permission android:name="android.permission.INTERNET"/>
    <uses-permission android:name="android.permission.RECORD_AUDIO" />
</manifest>


================================================
FILE: evi/evi-flutter/android/build.gradle
================================================
allprojects {
    repositories {
        google()
        mavenCentral()
    }
}

// Redirect all build output into a shared ../build directory, one
// subdirectory per module, so Flutter tooling can find the artifacts.
rootProject.buildDir = "../build"
subprojects {
    project.buildDir = "${rootProject.buildDir}/${project.name}"
}
// Plugin subprojects must be evaluated after :app so its config is available.
subprojects {
    project.evaluationDependsOn(":app")
}

// `gradle clean` removes the shared build directory.
tasks.register("clean", Delete) {
    delete rootProject.buildDir
}


================================================
FILE: evi/evi-flutter/android/gradle/wrapper/gradle-wrapper.properties
================================================
distributionBase=GRADLE_USER_HOME
distributionPath=wrapper/dists
zipStoreBase=GRADLE_USER_HOME
zipStorePath=wrapper/dists
distributionUrl=https\://services.gradle.org/distributions/gradle-8.3-all.zip


================================================
FILE: evi/evi-flutter/android/gradle.properties
================================================
org.gradle.jvmargs=-Xmx4G -XX:MaxMetaspaceSize=2G -XX:+HeapDumpOnOutOfMemoryError
android.useAndroidX=true
android.enableJetifier=true


================================================
FILE: evi/evi-flutter/android/settings.gradle
================================================
pluginManagement {
    // Resolve the Flutter SDK location from android/local.properties
    // (written by the `flutter` tool); fail early if it is missing.
    def flutterSdkPath = {
        def properties = new Properties()
        file("local.properties").withInputStream { properties.load(it) }
        def flutterSdkPath = properties.getProperty("flutter.sdk")
        assert flutterSdkPath != null, "flutter.sdk not set in local.properties"
        return flutterSdkPath
    }()

    // Make the Flutter Gradle tooling available to this build.
    includeBuild("$flutterSdkPath/packages/flutter_tools/gradle")

    repositories {
        google()
        mavenCentral()
        gradlePluginPortal()
    }
}

plugins {
    id "dev.flutter.flutter-plugin-loader" version "1.0.0"
    id "com.android.application" version "8.1.0" apply false
    id "org.jetbrains.kotlin.android" version "1.8.22" apply false
}

include ":app"


================================================
FILE: evi/evi-flutter/audio/.gitignore
================================================
# Miscellaneous
*.class
*.log
*.pyc
*.swp
.DS_Store
.atom/
.buildlog/
.history
.svn/
migrate_working_dir/

# IntelliJ related
*.iml
*.ipr
*.iws
.idea/

# The .vscode folder contains launch configuration and tasks you configure in
# VS Code which you may wish to be included in version control, so this line
# is commented out by default.
#.vscode/

# Flutter/Dart/Pub related
# Libraries should not include pubspec.lock, per https://dart.dev/guides/libraries/private-files#pubspeclock.
/pubspec.lock
**/doc/api/
.dart_tool/
build/


================================================
FILE: evi/evi-flutter/audio/.metadata
================================================
# This file tracks properties of this Flutter project.
# Used by Flutter tool to assess capabilities and perform upgrades etc.
#
# This file should be version controlled and should not be manually edited.

version:
  revision: "nixpkgs000000000000000000000000000000000"
  channel: "stable"

project_type: plugin

# Tracks metadata for the flutter migrate command
migration:
  platforms:
    - platform: root
      create_revision: nixpkgs000000000000000000000000000000000
      base_revision: nixpkgs000000000000000000000000000000000
    - platform: ios
      create_revision: nixpkgs000000000000000000000000000000000
      base_revision: nixpkgs000000000000000000000000000000000

  # User provided section

  # List of Local paths (relative to this file) that should be
  # ignored by the migrate tool.
  #
  # Files that are not part of the templates will be ignored by default.
  unmanaged_files:
    - 'lib/main.dart'
    - 'ios/Runner.xcodeproj/project.pbxproj'


================================================
FILE: evi/evi-flutter/audio/ios/.gitignore
================================================
.idea/
.vagrant/
.sconsign.dblite
.svn/

.DS_Store
*.swp
profile

DerivedData/
build/
GeneratedPluginRegistrant.h
GeneratedPluginRegistrant.m

.generated/

*.pbxuser
*.mode1v3
*.mode2v3
*.perspectivev3

!default.pbxuser
!default.mode1v3
!default.mode2v3
!default.perspectivev3

xcuserdata

*.moved-aside

*.pyc
*sync/
Icon?
.tags*

/Flutter/Generated.xcconfig
/Flutter/ephemeral/
/Flutter/flutter_export_environment.sh


================================================
FILE: evi/evi-flutter/audio/ios/Assets/.gitkeep
================================================


================================================
FILE: evi/evi-flutter/audio/ios/Classes/AudioPlugin.swift
================================================
import AVFoundation
import Flutter
import UIKit

/// Flutter plugin bridging native iOS audio capture/playback to Dart.
/// Method calls arrive on the "audio" channel; microphone audio and errors
/// are streamed back to Dart on the "audio/events" event channel.
public class AudioPlugin: NSObject, FlutterPlugin {
    // Lazily created so microphone resources are only allocated when
    // recording is actually requested.
    private lazy var microphone: Microphone = {
        return Microphone()
    }()
    private var soundPlayer: SoundPlayer

    private var eventChannel: FlutterEventChannel?
    private var eventSink: FlutterEventSink?

    /// Emits an error event to Dart. Dispatched on the main queue, as
    /// required for Flutter platform-channel callbacks; dropped silently
    /// if no listener is attached.
    private func sendError(_ message: String) {
        DispatchQueue.main.async {
            self.eventSink?([
                "type": "error",
                "message": message,
            ])
        }
    }

    /// Emits a base64-encoded microphone audio buffer to Dart.
    private func sendAudio(_ base64String: String) {
        DispatchQueue.main.async {
            self.eventSink?([
                "type": "audio",
                "data": base64String,
            ])
        }
    }

    public static func register(with registrar: FlutterPluginRegistrar) {
        let methodChannel = FlutterMethodChannel(
            name: "audio",
            binaryMessenger: registrar.messenger()
        )

        let eventChannel = FlutterEventChannel(
            name: "audio/events",
            binaryMessenger: registrar.messenger()
        )

        let instance = AudioPlugin()

        registrar.addMethodCallDelegate(instance, channel: methodChannel)

        eventChannel.setStreamHandler(instance)

        instance.eventChannel = eventChannel
    }

    override init() {
        self.soundPlayer = SoundPlayer()
        super.init()

        // Forward playback errors to Dart. sendError already checks for a
        // live sink at dispatch time, so the previous `guard let eventSink`
        // (whose binding was never used) has been removed.
        self.soundPlayer.onError { [weak self] error in
            guard let self = self else { return }

            switch error {
            case .invalidBase64String:
                self.sendError("Invalid base64 string")
            case .couldNotPlayAudio:
                self.sendError("Could not play audio")
            case .decodeError(let details):
                self.sendError(details)
            }
        }
    }

    /// Dispatches method-channel calls from Dart. Every branch must invoke
    /// `result` exactly once, or the awaiting Dart Future never completes.
    public func handle(_ call: FlutterMethodCall, result: @escaping FlutterResult) {
        switch call.method {
        case "getPermissions":
            // BUG FIX: `result` was previously never invoked for this call,
            // leaving the Dart side hanging. Report the permission outcome.
            Task {
                result(await getPermissions())
            }
        case "startRecording":
            do {
                try ensureInittedAudioSession()
                try microphone.startRecording(onBase64EncodedAudio: sendAudio)
                result(nil)
            } catch {
                result(
                    FlutterError(
                        code: "START_RECORDING_ERROR",
                        message: error.localizedDescription,
                        details: nil
                    )
                )
            }

        case "enqueueAudio":
            guard let base64String = call.arguments as? String else {
                result(
                    FlutterError(
                        code: "INVALID_ARGUMENTS",
                        message: "Expected base64 string",
                        details: nil
                    ))
                return
            }
            // Playback is fire-and-forget: ack immediately, report any
            // decode/playback failure via the event channel.
            Task {
                do {
                    try await soundPlayer.enqueueAudio(base64String)
                } catch {
                    sendError(error.localizedDescription)
                }
            }
            result(nil)

        case "stopPlayback":
            soundPlayer.stopPlayback()
            result(nil)

        case "stopRecording":
            microphone.stopRecording()
            result(nil)

        default:
            result(FlutterMethodNotImplemented)
        }
    }

    /// Returns whether microphone recording permission is granted,
    /// prompting the user if the permission is still undetermined.
    private func getPermissions() async -> Bool {
        let audioSession = AVAudioSession.sharedInstance()
        switch audioSession.recordPermission {
        case .granted:
            return true
        case .denied:
            return false
        case .undetermined:
            return await withCheckedContinuation { continuation in
                audioSession.requestRecordPermission { granted in
                    continuation.resume(returning: granted)
                }
            }
        @unknown default:
            sendError("Unknown permission state")
            return false
        }
    }

    // Configures the shared AVAudioSession once, for simultaneous
    // playback and recording with voice chat processing.
    private var inittedAudioSession = false
    private func ensureInittedAudioSession() throws {
        if inittedAudioSession { return }

        let audioSession = AVAudioSession.sharedInstance()
        try audioSession.setCategory(
            .playAndRecord,
            mode: .voiceChat,
            options: [.defaultToSpeaker, .allowBluetooth, .allowBluetoothA2DP]
        )
        try audioSession.setActive(true)
        inittedAudioSession = true
    }
}

// Stream-handler conformance for the "audio/events" event channel:
// tracks whether a Dart listener is attached.
extension AudioPlugin: FlutterStreamHandler {
    public func onListen(
        withArguments arguments: Any?,
        eventSink events: @escaping FlutterEventSink
    ) -> FlutterError? {
        // A Dart listener attached; subsequent audio/error events go here.
        self.eventSink = events
        return nil
    }

    public func onCancel(withArguments arguments: Any?) -> FlutterError? {
        // Listener detached; events are dropped until the next onListen.
        self.eventSink = nil
        return nil
    }
}

================================================
FILE: evi/evi-flutter/audio/ios/Classes/Microphone.swift
================================================
import AVFoundation
import Foundation

/// Errors surfaced by `Microphone` during audio capture.
public enum MicrophoneError: Error {
    // Audio format conversion failed; `details` is a human-readable reason.
    case conversionFailed(details: String)
    // Audio engine / input node setup failed; `details` is a human-readable reason.
    case setupFailed(details: String)
}

/// Captures microphone audio with an `AVAudioEngine`, converts it to mono
/// 16-bit linear PCM at 44.1 kHz, and delivers it as base64-encoded chunks.
public class Microphone {
    /// Sample rate of the audio handed to `onBase64EncodedAudio`.
    public static let sampleRate: Double = 44100
    /// The emitted audio is raw linear16 PCM (no container header).
    public static let isLinear16PCM: Bool = true
    // Wire format: mono, non-interleaved, 16-bit integer PCM at `sampleRate`.
    private static let desiredInputFormat = AVAudioFormat(commonFormat: .pcmFormatInt16, sampleRate: sampleRate, channels: 1, interleaved: false)!

    private var audioEngine: AVAudioEngine?
    private var inputNode: AVAudioInputNode?
    private var isMuted: Bool = false
    private var onError: ((MicrophoneError) -> Void)?

    public init() {
        self.isMuted = false
    }

    /// Registers a callback invoked when capture or conversion fails.
    public func onError(_ onError: @escaping (MicrophoneError) -> Void) {
        self.onError = onError
    }

    /// While muted, zeroed samples are emitted instead of microphone input so
    /// the consumer keeps receiving a steady stream.
    public func mute() {
        self.isMuted = true
    }

    public func unmute() {
        self.isMuted = false
    }

    /// Lazily creates the engine, wires the output path, and enables voice
    /// processing (echo cancellation) on both input and output nodes.
    private func setupAudioEngine() throws {
        self.audioEngine = AVAudioEngine()
        guard let audioEngine = self.audioEngine else {
            throw MicrophoneError.setupFailed(details: "Failed to create audio engine")
        }

        self.inputNode = audioEngine.inputNode
        guard let inputNode = self.inputNode else {
            throw MicrophoneError.setupFailed(details: "Failed to get input node")
        }

        let outputNode: AVAudioOutputNode = audioEngine.outputNode
        let mainMixerNode: AVAudioMixerNode = audioEngine.mainMixerNode
        audioEngine.connect(mainMixerNode, to: outputNode, format: nil)

        try inputNode.setVoiceProcessingEnabled(true)
        try outputNode.setVoiceProcessingEnabled(true)

        if #available(iOS 17.0, *) {
            // Keep other apps' audio at minimum ducking while we capture.
            let duckingConfig = AVAudioVoiceProcessingOtherAudioDuckingConfiguration(enableAdvancedDucking: false, duckingLevel: .max)
            inputNode.voiceProcessingOtherAudioDuckingConfiguration = duckingConfig
        }
    }

    /// Installs a tap on the input node and starts the engine. Each ~100 ms
    /// chunk is converted to the desired PCM format and passed, base64
    /// encoded, to `onBase64EncodedAudio`.
    ///
    /// - Throws: `MicrophoneError.setupFailed` when the engine cannot be
    ///   built, or any error from `AVAudioEngine.start()`.
    public func startRecording(onBase64EncodedAudio: @escaping (String) -> Void) throws {
        if audioEngine == nil {
            try setupAudioEngine()
        }

        guard let audioEngine = self.audioEngine, let inputNode = self.inputNode else {
            throw MicrophoneError.setupFailed(details: "Audio engine not properly initialized")
        }

        let nativeInputFormat = inputNode.inputFormat(forBus: 0)
        // Ask for ~100 ms of audio per tap callback.
        let inputBufferSize = UInt32(nativeInputFormat.sampleRate * 0.1)

        // Create the converter once up front. The previous implementation
        // rebuilt it inside every tap callback, which is wasteful and throws
        // away the resampler's state between chunks.
        guard let inputAudioConverter = AVAudioConverter(from: nativeInputFormat, to: Microphone.desiredInputFormat) else {
            throw MicrophoneError.setupFailed(details: "Failed to create audio converter")
        }

        // [weak self]: the tap closure is retained by the input node, which is
        // owned by our engine — a strong capture would create a retain cycle.
        inputNode.installTap(onBus: 0, bufferSize: inputBufferSize, format: nativeInputFormat) { [weak self] (buffer, time) in
            guard let self = self else { return }

            // Size the output for the whole chunk after resampling. A fixed
            // 1024-frame capacity (as before) truncates ~100 ms chunks and
            // silently drops most of the audio.
            let ratio = Microphone.desiredInputFormat.sampleRate / nativeInputFormat.sampleRate
            let frameCapacity = AVAudioFrameCount((Double(buffer.frameLength) * ratio).rounded(.up)) + 1
            let convertedBuffer = AVAudioPCMBuffer(pcmFormat: Microphone.desiredInputFormat, frameCapacity: frameCapacity)!

            if self.isMuted {
                // Emit silence matching the chunk size so downstream timing
                // stays consistent while muted.
                let silence = Data(repeating: 0, count: Int(convertedBuffer.frameCapacity) * Int(convertedBuffer.format.streamDescription.pointee.mBytesPerFrame))
                onBase64EncodedAudio(silence.base64EncodedString())
                return
            }

            var error: NSError? = nil
            // Hand the tap buffer to the converter exactly once per callback,
            // then report "no data now". Do NOT mutate buffer.frameLength here
            // (the old code set it to the requested packet count, corrupting
            // the tap buffer).
            var providedInput = false
            let status = inputAudioConverter.convert(to: convertedBuffer, error: &error, withInputFrom: { _, outStatus in
                if providedInput {
                    outStatus.pointee = .noDataNow
                    return nil
                }
                providedInput = true
                outStatus.pointee = .haveData
                return buffer
            })

            // .inputRanDry is also a success here: the converter consumed the
            // whole chunk and is waiting for the next callback.
            if status == .haveData || status == .inputRanDry {
                let byteLength = Int(convertedBuffer.frameLength) * Int(convertedBuffer.format.streamDescription.pointee.mBytesPerFrame)
                let audioData = Data(bytes: convertedBuffer.audioBufferList.pointee.mBuffers.mData!, count: byteLength)
                onBase64EncodedAudio(audioData.base64EncodedString())
                return
            }
            if let error = error {
                self.onError?(MicrophoneError.conversionFailed(details: error.localizedDescription))
                return
            }
            self.onError?(MicrophoneError.conversionFailed(details: "Unexpected status during audio conversion: \(status)"))
        }

        if !audioEngine.isRunning {
            try audioEngine.start()
        }
    }

    /// Stops the engine and removes the tap; `startRecording` may be called
    /// again afterwards.
    public func stopRecording() {
        audioEngine?.stop()
        inputNode?.removeTap(onBus: 0)
    }
}

================================================
FILE: evi/evi-flutter/audio/ios/Classes/SoundPlayer.swift
================================================
import AVFoundation
import Foundation

/// Errors surfaced by `SoundPlayer` via its `onError` callback or thrown
/// from `enqueueAudio`.
public enum SoundPlayerError: Error {
    // The provided string was not valid base64.
    case invalidBase64String
    // AVAudioPlayer.play() returned false.
    case couldNotPlayAudio
    // The audio data could not be decoded/played.
    case decodeError(details: String)
}

/// Plays base64-encoded WAV clips sequentially through `AVAudioPlayer`,
/// buffering clips that arrive faster than they can be played.
public class SoundPlayer: NSObject, AVAudioPlayerDelegate {
    private var audioPlayer: AVAudioPlayer?

    // EVI can send audio output messages faster than they can be played back.
    // It is important to buffer them in a queue so as not to cut off a clip of
    // playing audio with a more recent clip.
    private var audioQueue: [Data] = []  // Queue for audio segments

    private var isPlaying: Bool = false  // Tracks if audio is currently playing
    private var onError: ((SoundPlayerError) -> Void)?

    /// Registers a callback invoked when playback or decoding fails.
    public func onError(_ onError: @escaping (SoundPlayerError) -> Void) {
        self.onError = onError
    }

    /// Stops the current clip and discards everything queued.
    public func stopPlayback() {
        self.audioPlayer?.stop()
        self.audioPlayer = nil
        self.audioQueue.removeAll()  // Clear the queue
        isPlaying = false
    }

    /// Decodes the base64 clip, appends it to the queue, and starts playback
    /// if nothing is currently playing.
    ///
    /// - Throws: `SoundPlayerError.invalidBase64String` for malformed input.
    public func enqueueAudio(_ base64String: String) async throws {
        guard let data = Data(base64Encoded: base64String) else {
            throw SoundPlayerError.invalidBase64String
        }
        audioQueue.append(data)
        // If not already playing, start playback
        if !isPlaying {
            advanceQueue()
        }
    }

    /// Starts the next queued clip, routing any failure to `onError` instead
    /// of throwing. Also shared by the did-finish delegate callback.
    private func advanceQueue() {
        do {
            try playNextInQueue()
        } catch {
            // A failed clip must not wedge the queue: clear the playing flag
            // so a later enqueueAudio can restart playback.
            isPlaying = false
            if let soundError = error as? SoundPlayerError {
                self.onError?(soundError)
            } else {
                // AVAudioPlayer's initializer throws plain NSErrors; wrap them.
                self.onError?(SoundPlayerError.decodeError(details: error.localizedDescription))
            }
        }
    }

    /// Dequeues and plays one clip. Sets `isPlaying = false` when the queue
    /// is empty.
    private func playNextInQueue() throws {
        guard !audioQueue.isEmpty else {
            isPlaying = false
            return
        }

        isPlaying = true
        let data = audioQueue.removeFirst()

        self.audioPlayer = try AVAudioPlayer(data: data, fileTypeHint: AVFileType.wav.rawValue)

        let session: AVAudioSession = AVAudioSession.sharedInstance()
        self.audioPlayer!.prepareToPlay()
        self.audioPlayer!.delegate = self
        let result = audioPlayer!.play()

        let isSpeaker =
            session.currentRoute.outputs.first?.portType == AVAudioSession.Port.builtInSpeaker
        if isSpeaker {
            // This is to work around an issue with AVFoundation and voiceProcessing: https://forums.developer.apple.com/forums/thread/721535
            self.audioPlayer!.volume = 1.0
            try session.overrideOutputAudioPort(.none)
            try session.overrideOutputAudioPort(.speaker)
        }
        if !result {
            throw SoundPlayerError.couldNotPlayAudio
        }
    }

    /// Delegate callback: chain into the next clip when one finishes.
    /// (Previously this force-cast the thrown error to `SoundPlayerError`,
    /// which crashed whenever `AVAudioPlayer` threw an ordinary NSError.)
    public func audioPlayerDidFinishPlaying(_ player: AVAudioPlayer, successfully flag: Bool) {
        advanceQueue()
    }

    /// Delegate callback: surface decode failures to the registered handler.
    public func audioPlayerDecodeErrorDidOccur(_ player: AVAudioPlayer, error: Error?) {
        self.onError?(
            SoundPlayerError.decodeError(details: error?.localizedDescription ?? "Unknown error"))
    }
}


================================================
FILE: evi/evi-flutter/audio/ios/Resources/PrivacyInfo.xcprivacy
================================================
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
	<key>NSPrivacyTrackingDomains</key>
	<array/>
	<key>NSPrivacyAccessedAPITypes</key>
	<array/>
	<key>NSPrivacyCollectedDataTypes</key>
	<array/>
	<key>NSPrivacyTracking</key>
	<false/>
</dict>
</plist>


================================================
FILE: evi/evi-flutter/audio/ios/audio.podspec
================================================
#
# To learn more about a Podspec see http://guides.cocoapods.org/syntax/podspec.html.
# Run `pod lib lint audio.podspec` to validate before publishing.
#
# CocoaPods spec for the iOS half of the `audio` Flutter plugin.
Pod::Spec.new do |s|
  s.name             = 'audio'
  s.version          = '0.0.1'
  s.summary          = 'A new Flutter plugin project.'
  s.description      = <<-DESC
A new Flutter plugin project.
                       DESC
  s.homepage         = 'http://example.com'
  s.license          = { :file => '../LICENSE' }
  s.author           = { 'Your Company' => 'email@example.com' }
  s.source           = { :path => '.' }
  # Swift sources: AudioPlugin, Microphone, SoundPlayer.
  s.source_files = 'Classes/**/*'
  s.dependency 'Flutter'
  s.platform = :ios, '13.0'

  # Flutter.framework does not contain a i386 slice.
  s.pod_target_xcconfig = { 'DEFINES_MODULE' => 'YES', 'EXCLUDED_ARCHS[sdk=iphonesimulator*]' => 'i386' }
  s.swift_version = '5.0'

  # If your plugin requires a privacy manifest, for example if it uses any
  # required reason APIs, update the PrivacyInfo.xcprivacy file to describe your
  # plugin's privacy impact, and then uncomment this line. For more information,
  # see https://developer.apple.com/documentation/bundleresources/privacy_manifest_files
  # s.resource_bundles = {'audio_privacy' => ['Resources/PrivacyInfo.xcprivacy']}
end


================================================
FILE: evi/evi-flutter/audio/lib/audio.dart
================================================
import 'dart:async';
import 'dart:convert';
import 'dart:io' show Platform;
import 'package:flutter/foundation.dart' show kIsWeb;
import 'package:flutter/services.dart';
import 'package:audio/dart_audio.dart';

/// Facade over the platform audio backends: the native iOS plugin (method +
/// event channels) or a pure-Dart fallback ([DartAudio]) everywhere else.
class Audio {
  static final Audio _instance = Audio._internal();

  /// Singleton: every `Audio()` call returns the same instance.
  factory Audio() => _instance;

  static const MethodChannel channel = MethodChannel('audio');
  static const EventChannel _eventChannel = EventChannel('audio/events');

  // Non-null only when the pure-Dart fallback is in use (web and non-iOS).
  DartAudio? _dartAudio;

  Audio._internal() {
    if (kIsWeb || !Platform.isIOS) {
      _dartAudio = DartAudio();
    } else {
      // The native iOS plugin pushes microphone audio and errors over the
      // event channel; forward both into the broadcast stream.
      _eventChannel.receiveBroadcastStream().listen(
        (event) {
          if (event is Map) {
            if (event['type'] == 'audio') {
              final audioData = event['data'] as String;
              _audioController.add(audioData);
            } else if (event['type'] == 'error') {
              final error = event['message'] as String;
              _audioController.addError(error);
            }
          }
        },
        onError: (error) {
          _audioController.addError(error);
        },
      );
    }
  }

  final _audioController = StreamController<String>.broadcast();

  /// Base64-encoded microphone audio chunks (and errors) from whichever
  /// backend is active.
  Stream<String> get audioStream => _audioController.stream;

  /// Starts capturing microphone audio into [audioStream].
  Future<void> startRecording() async {
    if (_dartAudio != null) {
      (await _dartAudio!.startRecording()).listen(
        (data) {
          _audioController.add(base64Encode(data));
        },
        onError: (error) {
          _audioController.addError(error);
        },
      );
    } else {
      try {
        await channel.invokeMethod('startRecording');
      } catch (error) {
        _audioController.addError(error);
      }
    }
  }

  /// Stops capturing microphone audio.
  Future<void> stopRecording() async {
    if (_dartAudio != null) {
      // Await so callers observe completion (was previously fire-and-forget).
      await _dartAudio!.stopRecording();
    } else {
      await channel.invokeMethod('stopRecording');
    }
  }

  /// Mutes the microphone; capture continues but emits silence/no data.
  Future<void> mute() async {
    if (_dartAudio != null) {
      await _dartAudio!.mute();
    } else {
      await channel.invokeMethod('mute');
    }
  }

  /// Resumes emitting real microphone audio after [mute].
  Future<void> unmute() async {
    if (_dartAudio != null) {
      await _dartAudio!.unmute();
    } else {
      await channel.invokeMethod('unmute');
    }
  }

  /// Queues a base64-encoded audio clip for playback.
  Future<void> enqueueAudio(String base64String) async {
    if (_dartAudio != null) {
      _dartAudio!.enqueueAudioSegment(base64String);
    } else {
      // Removed stray debug print("Invoking enqueueAudio") left in here.
      await channel.invokeMethod('enqueueAudio', base64String);
    }
  }

  /// Stops the current clip and clears any queued playback.
  Future<void> stopPlayback() async {
    if (_dartAudio != null) {
      _dartAudio!.stopPlayback();
    } else {
      await channel.invokeMethod('stopPlayback');
    }
  }

  /// Releases the stream controller and the Dart fallback (if any).
  Future<void> dispose() async {
    await _audioController.close();
    await _dartAudio?.dispose();
  }
}


================================================
FILE: evi/evi-flutter/audio/lib/audio_method_channel.dart
================================================
import 'package:flutter/foundation.dart';
import 'package:flutter/services.dart';

import 'audio_platform_interface.dart';

/// An implementation of [AudioPlatform] that uses method channels.
/// An implementation of [AudioPlatform] that uses method channels.
class MethodChannelAudio extends AudioPlatform {
  /// The method channel used to interact with the native platform.
  @visibleForTesting
  final methodChannel = const MethodChannel('audio');

  /// Asks the native side for its OS version string (null if unanswered).
  @override
  Future<String?> getPlatformVersion() =>
      methodChannel.invokeMethod<String>('getPlatformVersion');
}


================================================
FILE: evi/evi-flutter/audio/lib/audio_platform_interface.dart
================================================
import 'package:plugin_platform_interface/plugin_platform_interface.dart';

import 'audio_method_channel.dart';

/// The common platform interface for the `audio` plugin; concrete backends
/// register themselves via [instance].
abstract class AudioPlatform extends PlatformInterface {
  /// Constructs a AudioPlatform.
  AudioPlatform() : super(token: _token);

  // Token proving subclasses extend (not merely implement) this interface.
  static final Object _token = Object();

  static AudioPlatform _instance = MethodChannelAudio();

  /// The default instance of [AudioPlatform] to use.
  ///
  /// Defaults to [MethodChannelAudio].
  static AudioPlatform get instance => _instance;

  /// Platform-specific implementations should set this with their own
  /// platform-specific class that extends [AudioPlatform] when
  /// they register themselves.
  static set instance(AudioPlatform instance) {
    PlatformInterface.verifyToken(instance, _token);
    _instance = instance;
  }

  /// Returns the host OS version string; backends must override this.
  Future<String?> getPlatformVersion() {
    throw UnimplementedError('platformVersion() has not been implemented.');
  }
}


================================================
FILE: evi/evi-flutter/audio/lib/dart_audio.dart
================================================
import 'dart:async';
import 'dart:convert';

import 'package:audioplayers/audioplayers.dart';
import 'package:record/record.dart';

/// Pure-Dart audio backend (used on platforms without the native iOS plugin):
/// playback via `audioplayers`, capture via `record`.
class DartAudio {
  // Playback stuff
  final AudioPlayer _audioPlayer = AudioPlayer();
  final List<Source> _playbackAudioQueue = [];

  // Recording stuff
  final AudioRecorder _recorder = AudioRecorder();
  final config = const RecordConfig(
    encoder: AudioEncoder.pcm16bits,
    // Nominal bitrate hint for the recorder. NOTE(review): the value
    // (48000 * 2 * 16) does not match the actual mono/16-bit stream
    // (48000 samples/s * 1 channel * 16 bits = 768000); PCM encoders
    // typically ignore this field, so the value is kept for compatibility.
    bitRate: 48000 * 2 * 16,
    sampleRate: 48000,
    numChannels: 1,
    autoGain: true,
    echoCancel: true,
    noiseSuppress: true,
  );
  bool _isMuted = false;
  bool _isRecording = false;
  StreamSubscription<List<int>>? _recordSubscription;

  DartAudio() {
    // Configure Android for voice-call style playback.
    final AudioContext audioContext = AudioContext(
      android: const AudioContextAndroid(
        isSpeakerphoneOn: false,
        audioMode: AndroidAudioMode.normal,
        stayAwake: false,
        contentType: AndroidContentType.speech,
        usageType: AndroidUsageType.voiceCommunication,
        audioFocus: AndroidAudioFocus.gain,
      ),
    );
    AudioPlayer.global.setAudioContext(audioContext);

    // Chain queued clips: when one finishes, start the next.
    _audioPlayer.onPlayerComplete.listen((event) {
      _playNextAudioSegment();
    });
  }

  // -------------------------
  // Playback fallback
  // -------------------------

  /// Decodes a base64 clip and either plays it immediately or queues it
  /// behind the clip currently playing.
  void enqueueAudioSegment(String base64Bytes) {
    final audioSegment = BytesSource(base64Decode(base64Bytes));
    if (_audioPlayer.state == PlayerState.playing) {
      _playbackAudioQueue.add(audioSegment);
    } else {
      _audioPlayer.play(audioSegment);
    }
  }

  /// Stops the current clip and discards everything queued.
  void stopPlayback() {
    _playbackAudioQueue.clear();
    _audioPlayer.stop();
  }

  void _playNextAudioSegment() {
    if (_playbackAudioQueue.isNotEmpty) {
      final audioSegment = _playbackAudioQueue.removeAt(0);
      _audioPlayer.play(audioSegment);
    }
  }

  // ----------------------------------------------------------------
  // (A) Recording fallback: returning a Stream of chunked bytes
  // ----------------------------------------------------------------
  /// Starts recording and returns a stream of ~100 ms chunks of raw
  /// 16-bit PCM bytes. All-zero (silent) chunks are dropped.
  ///
  /// Throws if already recording or if microphone permission is missing.
  Future<Stream<List<int>>> startRecording() async {
    if (_isRecording) {
      throw Exception('Already recording');
    }
    // Request mic permission
    if (!await _recorder.hasPermission()) {
      throw Exception('No mic permission');
    }

    // We'll create a StreamController to push chunked data
    final controller = StreamController<List<int>>();

    // Start streaming from the record package
    final recordStream = await _recorder.startStream(config);

    _isRecording = true;
    _isMuted = false;
    final audioInputBuffer = <int>[];

    // Chunk threshold in BYTES for ~100 ms of audio:
    // sampleRate * channels * 2 bytes per 16-bit sample, divided by 10.
    // (The old computation used config.bitRate — a bits-per-second figure
    // that also double-counted channels — yielding ~1.6 s chunks.)
    final chunkSize = config.sampleRate * config.numChannels * 2 ~/ 10;

    _recordSubscription = recordStream.listen(
      (data) {
        if (!_isMuted) {
          // If not muted, we add the new data
          audioInputBuffer.addAll(data);

          if (audioInputBuffer.length >= chunkSize) {
            // Drop chunks that are pure silence (all zero bytes).
            final bufferWasEmpty = audioInputBuffer.every((byte) => byte == 0);
            if (!bufferWasEmpty) {
              // Emit this chunk to the stream
              controller.add(List<int>.from(audioInputBuffer));
            }
            audioInputBuffer.clear();
          }
        } else {
          // While muted: intentionally discard incoming audio.
        }
      },
      onError: (err) => controller.addError(err),
      onDone: () {
        _isRecording = false;
        controller.close();
      },
    );

    return controller.stream;
  }

  /// Cancels the subscription and stops the recorder.
  Future<void> stopRecording() async {
    if (_isRecording) {
      await _recordSubscription?.cancel();
      _recordSubscription = null;
      await _recorder.stop();
      _isRecording = false;
      _isMuted = false;
    }
  }

  /// Discards microphone input until [unmute] is called.
  Future<void> mute() async {
    _isMuted = true;
  }

  Future<void> unmute() async {
    _isMuted = false;
  }

  // ----------------------------------------------------------------
  // Cleanup
  // ----------------------------------------------------------------
  Future<void> dispose() async {
    await _audioPlayer.dispose();
    await stopRecording(); // stop + unsub
  }
}


================================================
FILE: evi/evi-flutter/audio/pubspec.yaml
================================================
name: audio
description: "A new Flutter plugin project."
version: 0.0.1
homepage:

environment:
  sdk: ^3.5.4
  flutter: '>=3.3.0'

dependencies:
  flutter:
    sdk: flutter
  audioplayers: ^6.1.0
  record: ^5.1.2
  plugin_platform_interface: ^2.0.2

dev_dependencies:
  flutter_test:
    sdk: flutter
  flutter_lints: ^4.0.0

# For information on the generic Dart part of this file, see the
# following page: https://dart.dev/tools/pub/pubspec

# The following section is specific to Flutter packages.
flutter:
  # This section identifies this Flutter project as a plugin project.
  # The 'pluginClass' specifies the class (in Java, Kotlin, Swift, Objective-C, etc.)
  # which should be registered in the plugin registry. This is required for
  # using method channels.
  # The Android 'package' specifies package in which the registered class is.
  # This is required for using method channels on Android.
  # The 'ffiPlugin' specifies that native code should be built and bundled.
  # This is required for using `dart:ffi`.
  # All these are used by the tooling to maintain consistency when
  # adding or updating assets for this project.
  plugin:
    platforms:
      ios:
        pluginClass: AudioPlugin

  # To add assets to your plugin package, add an assets section, like this:
  # assets:
  #   - images/a_dot_burr.jpeg
  #   - images/a_dot_ham.jpeg
  #
  # For details regarding assets in packages, see
  # https://flutter.dev/to/asset-from-package
  #
  # An image asset can refer to one or more resolution-specific "variants", see
  # https://flutter.dev/to/resolution-aware-images

  # To add custom fonts to your plugin package, add a fonts section here,
  # in this "flutter" section. Each entry in this list should have a
  # "family" key with the font family name, and a "fonts" key with a
  # list giving the asset and other descriptors for the font. For
  # example:
  # fonts:
  #   - family: Schyler
  #     fonts:
  #       - asset: fonts/Schyler-Regular.ttf
  #       - asset: fonts/Schyler-Italic.ttf
  #         style: italic
  #   - family: Trajan Pro
  #     fonts:
  #       - asset: fonts/TrajanPro.ttf
  #       - asset: fonts/TrajanPro_Bold.ttf
  #         weight: 700
  #
  # For details regarding fonts in packages, see
  # https://flutter.dev/to/font-from-package


================================================
FILE: evi/evi-flutter/audio/test/audio_method_channel_test.dart
================================================
import 'package:flutter/services.dart';
import 'package:flutter_test/flutter_test.dart';
import 'package:audio/audio_method_channel.dart';

void main() {
  TestWidgetsFlutterBinding.ensureInitialized();

  final MethodChannelAudio platform = MethodChannelAudio();
  const MethodChannel channel = MethodChannel('audio');

  setUp(() {
    // Stub the native side: every call on the 'audio' channel answers '42'.
    TestDefaultBinaryMessengerBinding.instance.defaultBinaryMessenger
        .setMockMethodCallHandler(
      channel,
      (MethodCall methodCall) async => '42',
    );
  });

  tearDown(() {
    // Remove the stub so later tests see an unmocked channel.
    TestDefaultBinaryMessengerBinding.instance.defaultBinaryMessenger
        .setMockMethodCallHandler(channel, null);
  });

  test('getPlatformVersion', () async {
    expect(await platform.getPlatformVersion(), '42');
  });
}


================================================
FILE: evi/evi-flutter/audio/test/audio_test.dart
================================================
import 'package:flutter_test/flutter_test.dart';
import 'package:audio/audio.dart';
import 'package:audio/audio_platform_interface.dart';
import 'package:audio/audio_method_channel.dart';
import 'package:plugin_platform_interface/plugin_platform_interface.dart';

/// Test double for [AudioPlatform] that answers a fixed version string.
class MockAudioPlatform
    with MockPlatformInterfaceMixin
    implements AudioPlatform {

  @override
  Future<String?> getPlatformVersion() => Future.value('42');
}

void main() {
  final AudioPlatform initialPlatform = AudioPlatform.instance;

  test('$MethodChannelAudio is the default instance', () {
    expect(initialPlatform, isInstanceOf<MethodChannelAudio>());
  });

  test('getPlatformVersion', () async {
    MockAudioPlatform fakePlatform = MockAudioPlatform();
    AudioPlatform.instance = fakePlatform;

    // Exercise the platform interface directly. The previous version called
    // Audio().getPlatformVersion(), but the Audio facade in audio.dart does
    // not define that method, so the test did not compile.
    expect(await AudioPlatform.instance.getPlatformVersion(), '42');
  });
}


================================================
FILE: evi/evi-flutter/ios/.gitignore
================================================
**/dgph
*.mode1v3
*.mode2v3
*.moved-aside
*.pbxuser
*.perspectivev3
**/*sync/
.sconsign.dblite
.tags*
**/.vagrant/
**/DerivedData/
Icon?
**/Pods/
**/.symlinks/
profile
xcuserdata
**/.generated/
Flutter/App.framework
Flutter/Flutter.framework
Flutter/Flutter.podspec
Flutter/Generated.xcconfig
Flutter/ephemeral/
Flutter/app.flx
Flutter/app.zip
Flutter/flutter_assets/
Flutter/flutter_export_environment.sh
ServiceDefinitions.json
Runner/GeneratedPluginRegistrant.*

# Exceptions to above rules.
!default.mode1v3
!default.mode2v3
!default.pbxuser
!default.perspectivev3


================================================
FILE: evi/evi-flutter/ios/Flutter/AppFrameworkInfo.plist
================================================
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
  <key>CFBundleDevelopmentRegion</key>
  <string>en</string>
  <key>CFBundleExecutable</key>
  <string>App</string>
  <key>CFBundleIdentifier</key>
  <string>io.flutter.flutter.app</string>
  <key>CFBundleInfoDictionaryVersion</key>
  <string>6.0</string>
  <key>CFBundleName</key>
  <string>App</string>
  <key>CFBundlePackageType</key>
  <string>FMWK</string>
  <key>CFBundleShortVersionString</key>
  <string>1.0</string>
  <key>CFBundleSignature</key>
  <string>????</string>
  <key>CFBundleVersion</key>
  <string>1.0</string>
  <key>MinimumOSVersion</key>
  <string>12.0</string>
</dict>
</plist>


================================================
FILE: evi/evi-flutter/ios/Flutter/Debug.xcconfig
================================================
#include? "Pods/Target Support Files/Pods-Runner/Pods-Runner.debug.xcconfig"
#include "Generated.xcconfig"


================================================
FILE: evi/evi-flutter/ios/Flutter/Release.xcconfig
================================================
#include? "Pods/Target Support Files/Pods-Runner/Pods-Runner.release.xcconfig"
#include "Generated.xcconfig"


================================================
FILE: evi/evi-flutter/ios/Podfile
================================================
# Uncomment this line to define a global platform for your project
# platform :ios, '12.0'

# CocoaPods analytics sends network stats synchronously affecting flutter build latency.
ENV['COCOAPODS_DISABLE_STATS'] = 'true'

project 'Runner', {
  'Debug' => :debug,
  'Profile' => :release,
  'Release' => :release,
}

# Reads FLUTTER_ROOT out of Flutter/Generated.xcconfig (written by
# `flutter pub get`) so the Flutter podhelper can be located on this machine.
def flutter_root
  generated_xcode_build_settings_path = File.expand_path(File.join('..', 'Flutter', 'Generated.xcconfig'), __FILE__)
  unless File.exist?(generated_xcode_build_settings_path)
    raise "#{generated_xcode_build_settings_path} must exist. If you're running pod install manually, make sure flutter pub get is executed first"
  end

  # Scan line by line for the FLUTTER_ROOT=... assignment.
  File.foreach(generated_xcode_build_settings_path) do |line|
    matches = line.match(/FLUTTER_ROOT\=(.*)/)
    return matches[1].strip if matches
  end
  raise "FLUTTER_ROOT not found in #{generated_xcode_build_settings_path}. Try deleting Generated.xcconfig, then run flutter pub get"
end

require File.expand_path(File.join('packages', 'flutter_tools', 'bin', 'podhelper'), flutter_root)

flutter_ios_podfile_setup

target 'Runner' do
  use_frameworks!
  use_modular_headers!

  flutter_install_all_ios_pods File.dirname(File.realpath(__FILE__))
  target 'RunnerTests' do
    inherit! :search_paths
  end
end

post_install do |installer|
  installer.pods_project.targets.each do |target|
    flutter_additional_ios_build_settings(target)
  end
end


================================================
FILE: evi/evi-flutter/ios/Runner/AppDelegate.swift
================================================
import Flutter
import UIKit

@main
@objc class AppDelegate: FlutterAppDelegate {
  /// Registers all generated Flutter plugins before delegating launch
  /// handling to FlutterAppDelegate.
  override func application(
    _ application: UIApplication,
    didFinishLaunchingWithOptions launchOptions: [UIApplication.LaunchOptionsKey: Any]?
  ) -> Bool {
    GeneratedPluginRegistrant.register(with: self)
    return super.application(application, didFinishLaunchingWithOptions: launchOptions)
  }
}


================================================
FILE: evi/evi-flutter/ios/Runner/Assets.xcassets/AppIcon.appiconset/Contents.json
================================================
{
  "images" : [
    {
      "size" : "20x20",
      "idiom" : "iphone",
      "filename" : "Icon-App-20x20@2x.png",
      "scale" : "2x"
    },
    {
      "size" : "20x20",
      "idiom" : "iphone",
      "filename" : "Icon-App-20x20@3x.png",
      "scale" : "3x"
    },
    {
      "size" : "29x29",
      "idiom" : "iphone",
      "filename" : "Icon-App-29x29@1x.png",
      "scale" : "1x"
    },
    {
      "size" : "29x29",
      "idiom" : "iphone",
      "filename" : "Icon-App-29x29@2x.png",
      "scale" : "2x"
    },
    {
      "size" : "29x29",
      "idiom" : "iphone",
      "filename" : "Icon-App-29x29@3x.png",
      "scale" : "3x"
    },
    {
      "size" : "40x40",
      "idiom" : "iphone",
      "filename" : "Icon-App-40x40@2x.png",
      "scale" : "2x"
    },
    {
      "size" : "40x40",
      "idiom" : "iphone",
      "filename" : "Icon-App-40x40@3x.png",
      "scale" : "3x"
    },
    {
      "size" : "60x60",
      "idiom" : "iphone",
      "filename" : "Icon-App-60x60@2x.png",
      "scale" : "2x"
    },
    {
      "size" : "60x60",
      "idiom" : "iphone",
      "filename" : "Icon-App-60x60@3x.png",
      "scale" : "3x"
    },
    {
      "size" : "20x20",
      "idiom" : "ipad",
      "filename" : "Icon-App-20x20@1x.png",
      "scale" : "1x"
    },
    {
      "size" : "20x20",
      "idiom" : "ipad",
      "filename" : "Icon-App-20x20@2x.png",
      "scale" : "2x"
    },
    {
      "size" : "29x29",
      "idiom" : "ipad",
      "filename" : "Icon-App-29x29@1x.png",
      "scale" : "1x"
    },
    {
      "size" : "29x29",
      "idiom" : "ipad",
      "filename" : "Icon-App-29x29@2x.png",
      "scale" : "2x"
    },
    {
      "size" : "40x40",
      "idiom" : "ipad",
      "filename" : "Icon-App-40x40@1x.png",
      "scale" : "1x"
    },
    {
      "size" : "40x40",
      "idiom" : "ipad",
      "filename" : "Icon-App-40x40@2x.png",
      "scale" : "2x"
    },
    {
      "size" : "76x76",
      "idiom" : "ipad",
      "filename" : "Icon-App-76x76@1x.png",
      "scale" : "1x"
    },
    {
      "size" : "76x76",
      "idiom" : "ipad",
      "filename" : "Icon-App-76x76@2x.png",
      "scale" : "2x"
    },
    {
      "size" : "83.5x83.5",
      "idiom" : "ipad",
      "filename" : "Icon-App-83.5x83.5@2x.png",
      "scale" : "2x"
    },
    {
      "size" : "1024x1024",
      "idiom" : "ios-marketing",
      "filename" : "Icon-App-1024x1024@1x.png",
      "scale" : "1x"
    }
  ],
  "info" : {
    "version" : 1,
    "author" : "xcode"
  }
}


================================================
FILE: evi/evi-flutter/ios/Runner/Assets.xcassets/LaunchImage.imageset/Contents.json
================================================
{
  "images" : [
    {
      "idiom" : "universal",
      "filename" : "LaunchImage.png",
      "scale" : "1x"
    },
    {
      "idiom" : "universal",
      "filename" : "LaunchImage@2x.png",
      "scale" : "2x"
    },
    {
      "idiom" : "universal",
      "filename" : "LaunchImage@3x.png",
      "scale" : "3x"
    }
  ],
  "info" : {
    "version" : 1,
    "author" : "xcode"
  }
}


================================================
FILE: evi/evi-flutter/ios/Runner/Assets.xcassets/LaunchImage.imageset/README.md
================================================
# Launch Screen Assets

You can customize the launch screen with your own desired assets by replacing the image files in this directory.

You can also do it by opening your Flutter project's Xcode project with `open ios/Runner.xcworkspace`, selecting `Runner/Assets.xcassets` in the Project Navigator and dropping in the desired images.

================================================
FILE: evi/evi-flutter/ios/Runner/Base.lproj/LaunchScreen.storyboard
================================================
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<document type="com.apple.InterfaceBuilder3.CocoaTouch.Storyboard.XIB" version="3.0" toolsVersion="12121" systemVersion="16G29" targetRuntime="iOS.CocoaTouch" propertyAccessControl="none" useAutolayout="YES" launchScreen="YES" colorMatched="YES" initialViewController="01J-lp-oVM">
    <dependencies>
        <deployment identifier="iOS"/>
        <plugIn identifier="com.apple.InterfaceBuilder.IBCocoaTouchPlugin" version="12089"/>
    </dependencies>
    <scenes>
        <!--View Controller-->
        <scene sceneID="EHf-IW-A2E">
            <objects>
                <viewController id="01J-lp-oVM" sceneMemberID="viewController">
                    <layoutGuides>
                        <viewControllerLayoutGuide type="top" id="Ydg-fD-yQy"/>
                        <viewControllerLayoutGuide type="bottom" id="xbc-2k-c8Z"/>
                    </layoutGuides>
                    <view key="view" contentMode="scaleToFill" id="Ze5-6b-2t3">
                        <autoresizingMask key="autoresizingMask" widthSizable="YES" heightSizable="YES"/>
                        <subviews>
                            <imageView opaque="NO" clipsSubviews="YES" multipleTouchEnabled="YES" contentMode="center" image="LaunchImage" translatesAutoresizingMaskIntoConstraints="NO" id="YRO-k0-Ey4">
                            </imageView>
                        </subviews>
                        <color key="backgroundColor" red="1" green="1" blue="1" alpha="1" colorSpace="custom" customColorSpace="sRGB"/>
                        <constraints>
                            <constraint firstItem="YRO-k0-Ey4" firstAttribute="centerX" secondItem="Ze5-6b-2t3" secondAttribute="centerX" id="1a2-6s-vTC"/>
                            <constraint firstItem="YRO-k0-Ey4" firstAttribute="centerY" secondItem="Ze5-6b-2t3" secondAttribute="centerY" id="4X2-HB-R7a"/>
                        </constraints>
                    </view>
                </viewController>
                <placeholder placeholderIdentifier="IBFirstResponder" id="iYj-Kq-Ea1" userLabel="First Responder" sceneMemberID="firstResponder"/>
            </objects>
            <point key="canvasLocation" x="53" y="375"/>
        </scene>
    </scenes>
    <resources>
        <image name="LaunchImage" width="168" height="185"/>
    </resources>
</document>


================================================
FILE: evi/evi-flutter/ios/Runner/Base.lproj/Main.storyboard
================================================
<?xml version="1.0" encoding="UTF-8"?>
<document type="com.apple.InterfaceBuilder3.CocoaTouch.Storyboard.XIB" version="3.0" toolsVersion="23094" targetRuntime="iOS.CocoaTouch" propertyAccessControl="none" useAutolayout="YES" useTraitCollections="YES" colorMatched="YES" initialViewController="BYZ-38-t0r">
    <device id="retina6_12" orientation="portrait" appearance="light"/>
    <dependencies>
        <deployment identifier="iOS"/>
        <plugIn identifier="com.apple.InterfaceBuilder.IBCocoaTouchPlugin" version="23084"/>
        <capability name="documents saved in the Xcode 8 format" minToolsVersion="8.0"/>
    </dependencies>
    <scenes>
        <!--Flutter View Controller-->
        <scene sceneID="tne-QT-ifu">
            <objects>
                <viewController id="BYZ-38-t0r" customClass="FlutterViewController" sceneMemberID="viewController">
                    <layoutGuides>
                        <viewControllerLayoutGuide type="top" id="y3c-jy-aDJ"/>
                        <viewControllerLayoutGuide type="bottom" id="wfy-db-euE"/>
                    </layoutGuides>
                    <view key="view" contentMode="scaleToFill" id="8bC-Xf-vdC">
                        <rect key="frame" x="0.0" y="0.0" width="393" height="852"/>
                        <autoresizingMask key="autoresizingMask" widthSizable="YES" heightSizable="YES"/>
                        <color key="backgroundColor" red="1" green="1" blue="1" alpha="1" colorSpace="custom" customColorSpace="sRGB"/>
                    </view>
                </viewController>
                <placeholder placeholderIdentifier="IBFirstResponder" id="dkx-z0-nzr" sceneMemberID="firstResponder"/>
            </objects>
            <point key="canvasLocation" x="68" y="-34"/>
        </scene>
    </scenes>
</document>


================================================
FILE: evi/evi-flutter/ios/Runner/Info.plist
================================================
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
	<key>CFBundleDevelopmentRegion</key>
	<string>$(DEVELOPMENT_LANGUAGE)</string>
	<key>CFBundleDisplayName</key>
	<string>Evi Example</string>
	<key>CFBundleExecutable</key>
	<string>$(EXECUTABLE_NAME)</string>
	<key>CFBundleIdentifier</key>
	<string>$(PRODUCT_BUNDLE_IDENTIFIER)</string>
	<key>CFBundleInfoDictionaryVersion</key>
	<string>6.0</string>
	<key>CFBundleName</key>
	<string>evi_example</string>
	<key>CFBundlePackageType</key>
	<string>APPL</string>
	<key>CFBundleShortVersionString</key>
	<string>$(FLUTTER_BUILD_NAME)</string>
	<key>CFBundleSignature</key>
	<string>????</string>
	<key>CFBundleVersion</key>
	<string>$(FLUTTER_BUILD_NUMBER)</string>
	<key>LSRequiresIPhoneOS</key>
	<true/>
	<key>UILaunchStoryboardName</key>
	<string>LaunchScreen</string>
	<key>UIMainStoryboardFile</key>
	<string>Main</string>
	<key>UISupportedInterfaceOrientations</key>
	<array>
		<string>UIInterfaceOrientationPortrait</string>
		<string>UIInterfaceOrientationLandscapeLeft</string>
		<string>UIInterfaceOrientationLandscapeRight</string>
	</array>
	<key>UISupportedInterfaceOrientations~ipad</key>
	<array>
		<string>UIInterfaceOrientationPortrait</string>
		<string>UIInterfaceOrientationPortraitUpsideDown</string>
		<string>UIInterfaceOrientationLandscapeLeft</string>
		<string>UIInterfaceOrientationLandscapeRight</string>
	</array>
	<key>CADisableMinimumFrameDurationOnPhone</key>
	<true/>
	<key>UIApplicationSupportsIndirectInputEvents</key>
	<true/>
	<key>NSMicrophoneUsageDescription</key>
	<string>Transmits speech to a conversational AI</string>
</dict>
</plist>


================================================
FILE: evi/evi-flutter/ios/Runner/Runner-Bridging-Header.h
================================================
#import "GeneratedPluginRegistrant.h"


================================================
FILE: evi/evi-flutter/ios/Runner.xcodeproj/project.pbxproj
================================================
// !$*UTF8*$!
{
	archiveVersion = 1;
	classes = {
	};
	objectVersion = 54;
	objects = {

/* Begin PBXBuildFile section */
		1498D2341E8E89220040F4C2 /* GeneratedPluginRegistrant.m in Sources */ = {isa = PBXBuildFile; fileRef = 1498D2331E8E89220040F4C2 /* GeneratedPluginRegistrant.m */; };
		331C808B294A63AB00263BE5 /* RunnerTests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 331C807B294A618700263BE5 /* RunnerTests.swift */; };
		3B3967161E833CAA004F5970 /* AppFrameworkInfo.plist in Resources */ = {isa = PBXBuildFile; fileRef = 3B3967151E833CAA004F5970 /* AppFrameworkInfo.plist */; };
		711D2FFCE0FE40B34BED4AB3 /* Pods_RunnerTests.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 3CB28A3E128E29A157E09664 /* Pods_RunnerTests.framework */; };
		74858FAF1ED2DC5600515810 /* AppDelegate.swift in Sources */ = {isa = PBXBuildFile; fileRef = 74858FAE1ED2DC5600515810 /* AppDelegate.swift */; };
		97C146FC1CF9000F007C117D /* Main.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = 97C146FA1CF9000F007C117D /* Main.storyboard */; };
		97C146FE1CF9000F007C117D /* Assets.xcassets in Resources */ = {isa = PBXBuildFile; fileRef = 97C146FD1CF9000F007C117D /* Assets.xcassets */; };
		97C147011CF9000F007C117D /* LaunchScreen.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = 97C146FF1CF9000F007C117D /* LaunchScreen.storyboard */; };
		CA39E55412833DBA2A92AB4E /* Pods_Runner.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 9E1187B4A5C75EEA06C9F271 /* Pods_Runner.framework */; };
/* End PBXBuildFile section */

/* Begin PBXContainerItemProxy section */
		331C8085294A63A400263BE5 /* PBXContainerItemProxy */ = {
			isa = PBXContainerItemProxy;
			containerPortal = 97C146E61CF9000F007C117D /* Project object */;
			proxyType = 1;
			remoteGlobalIDString = 97C146ED1CF9000F007C117D;
			remoteInfo = Runner;
		};
/* End PBXContainerItemProxy section */

/* Begin PBXCopyFilesBuildPhase section */
		9705A1C41CF9048500538489 /* Embed Frameworks */ = {
			isa = PBXCopyFilesBuildPhase;
			buildActionMask = 2147483647;
			dstPath = "";
			dstSubfolderSpec = 10;
			files = (
			);
			name = "Embed Frameworks";
			runOnlyForDeploymentPostprocessing = 0;
		};
/* End PBXCopyFilesBuildPhase section */

/* Begin PBXFileReference section */
		08CD5310A37D5E4C76389779 /* Pods-RunnerTests.debug.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-RunnerTests.debug.xcconfig"; path = "Target Support Files/Pods-RunnerTests/Pods-RunnerTests.debug.xcconfig"; sourceTree = "<group>"; };
		1498D2321E8E86230040F4C2 /* GeneratedPluginRegistrant.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = GeneratedPluginRegistrant.h; sourceTree = "<group>"; };
		1498D2331E8E89220040F4C2 /* GeneratedPluginRegistrant.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = GeneratedPluginRegistrant.m; sourceTree = "<group>"; };
		331C807B294A618700263BE5 /* RunnerTests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = RunnerTests.swift; sourceTree = "<group>"; };
		331C8081294A63A400263BE5 /* RunnerTests.xctest */ = {isa = PBXFileReference; explicitFileType = wrapper.cfbundle; includeInIndex = 0; path = RunnerTests.xctest; sourceTree = BUILT_PRODUCTS_DIR; };
		3B3967151E833CAA004F5970 /* AppFrameworkInfo.plist */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = text.plist.xml; name = AppFrameworkInfo.plist; path = Flutter/AppFrameworkInfo.plist; sourceTree = "<group>"; };
		3CB28A3E128E29A157E09664 /* Pods_RunnerTests.framework */ = {isa = PBXFileReference; explicitFileType = wrapper.framework; includeInIndex = 0; path = Pods_RunnerTests.framework; sourceTree = BUILT_PRODUCTS_DIR; };
		5EB49DBBBD50A5533BF1F17B /* Pods-Runner.profile.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-Runner.profile.xcconfig"; path = "Target Support Files/Pods-Runner/Pods-Runner.profile.xcconfig"; sourceTree = "<group>"; };
		74858FAD1ED2DC5600515810 /* Runner-Bridging-Header.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = "Runner-Bridging-Header.h"; sourceTree = "<group>"; };
		74858FAE1ED2DC5600515810 /* AppDelegate.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = AppDelegate.swift; sourceTree = "<group>"; };
		7AFA3C8E1D35360C0083082E /* Release.xcconfig */ = {isa = PBXFileReference; lastKnownFileType = text.xcconfig; name = Release.xcconfig; path = Flutter/Release.xcconfig; sourceTree = "<group>"; };
		9740EEB21CF90195004384FC /* Debug.xcconfig */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = text.xcconfig; name = Debug.xcconfig; path = Flutter/Debug.xcconfig; sourceTree = "<group>"; };
		9740EEB31CF90195004384FC /* Generated.xcconfig */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = text.xcconfig; name = Generated.xcconfig; path = Flutter/Generated.xcconfig; sourceTree = "<group>"; };
		97C146EE1CF9000F007C117D /* Runner.app */ = {isa = PBXFileReference; explicitFileType = wrapper.application; includeInIndex = 0; path = Runner.app; sourceTree = BUILT_PRODUCTS_DIR; };
		97C146FB1CF9000F007C117D /* Base */ = {isa = PBXFileReference; lastKnownFileType = file.storyboard; name = Base; path = Base.lproj/Main.storyboard; sourceTree = "<group>"; };
		97C146FD1CF9000F007C117D /* Assets.xcassets */ = {isa = PBXFileReference; lastKnownFileType = folder.assetcatalog; path = Assets.xcassets; sourceTree = "<group>"; };
		97C147001CF9000F007C117D /* Base */ = {isa = PBXFileReference; lastKnownFileType = file.storyboard; name = Base; path = Base.lproj/LaunchScreen.storyboard; sourceTree = "<group>"; };
		97C147021CF9000F007C117D /* Info.plist */ = {isa = PBXFileReference; lastKnownFileType = text.plist.xml; path = Info.plist; sourceTree = "<group>"; };
		9E1187B4A5C75EEA06C9F271 /* Pods_Runner.framework */ = {isa = PBXFileReference; explicitFileType = wrapper.framework; includeInIndex = 0; path = Pods_Runner.framework; sourceTree = BUILT_PRODUCTS_DIR; };
		AAB04AD19FC40D502B31E905 /* Pods-Runner.release.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-Runner.release.xcconfig"; path = "Target Support Files/Pods-Runner/Pods-Runner.release.xcconfig"; sourceTree = "<group>"; };
		C86C8F4A02C79AC1A7BF6F09 /* Pods-Runner.debug.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-Runner.debug.xcconfig"; path = "Target Support Files/Pods-Runner/Pods-Runner.debug.xcconfig"; sourceTree = "<group>"; };
		E65E8B38E3E3EC1E0351A6F2 /* Pods-RunnerTests.release.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-RunnerTests.release.xcconfig"; path = "Target Support Files/Pods-RunnerTests/Pods-RunnerTests.release.xcconfig"; sourceTree = "<group>"; };
		E6FCCA409E121E10B5878E69 /* Pods-RunnerTests.profile.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-RunnerTests.profile.xcconfig"; path = "Target Support Files/Pods-RunnerTests/Pods-RunnerTests.profile.xcconfig"; sourceTree = "<group>"; };
/* End PBXFileReference section */

/* Begin PBXFrameworksBuildPhase section */
		97C146EB1CF9000F007C117D /* Frameworks */ = {
			isa = PBXFrameworksBuildPhase;
			buildActionMask = 2147483647;
			files = (
				CA39E55412833DBA2A92AB4E /* Pods_Runner.framework in Frameworks */,
			);
			runOnlyForDeploymentPostprocessing = 0;
		};
		A9E43356DD9F64ADD0BD1417 /* Frameworks */ = {
			isa = PBXFrameworksBuildPhase;
			buildActionMask = 2147483647;
			files = (
				711D2FFCE0FE40B34BED4AB3 /* Pods_RunnerTests.framework in Frameworks */,
			);
			runOnlyForDeploymentPostprocessing = 0;
		};
/* End PBXFrameworksBuildPhase section */

/* Begin PBXGroup section */
		331C8082294A63A400263BE5 /* RunnerTests */ = {
			isa = PBXGroup;
			children = (
				331C807B294A618700263BE5 /* RunnerTests.swift */,
			);
			path = RunnerTests;
			sourceTree = "<group>";
		};
		630333982710EBFF5EEEF8EB /* Pods */ = {
			isa = PBXGroup;
			children = (
				C86C8F4A02C79AC1A7BF6F09 /* Pods-Runner.debug.xcconfig */,
				AAB04AD19FC40D502B31E905 /* Pods-Runner.release.xcconfig */,
				5EB49DBBBD50A5533BF1F17B /* Pods-Runner.profile.xcconfig */,
				08CD5310A37D5E4C76389779 /* Pods-RunnerTests.debug.xcconfig */,
				E65E8B38E3E3EC1E0351A6F2 /* Pods-RunnerTests.release.xcconfig */,
				E6FCCA409E121E10B5878E69 /* Pods-RunnerTests.profile.xcconfig */,
			);
			path = Pods;
			sourceTree = "<group>";
		};
		9740EEB11CF90186004384FC /* Flutter */ = {
			isa = PBXGroup;
			children = (
				3B3967151E833CAA004F5970 /* AppFrameworkInfo.plist */,
				9740EEB21CF90195004384FC /* Debug.xcconfig */,
				7AFA3C8E1D35360C0083082E /* Release.xcconfig */,
				9740EEB31CF90195004384FC /* Generated.xcconfig */,
			);
			name = Flutter;
			sourceTree = "<group>";
		};
		97C146E51CF9000F007C117D = {
			isa = PBXGroup;
			children = (
				9740EEB11CF90186004384FC /* Flutter */,
				97C146F01CF9000F007C117D /* Runner */,
				97C146EF1CF9000F007C117D /* Products */,
				331C8082294A63A400263BE5 /* RunnerTests */,
				630333982710EBFF5EEEF8EB /* Pods */,
				BDAF68A17B0E5205A5813592 /* Frameworks */,
			);
			sourceTree = "<group>";
		};
		97C146EF1CF9000F007C117D /* Products */ = {
			isa = PBXGroup;
			children = (
				97C146EE1CF9000F007C117D /* Runner.app */,
				331C8081294A63A400263BE5 /* RunnerTests.xctest */,
			);
			name = Products;
			sourceTree = "<group>";
		};
		97C146F01CF9000F007C117D /* Runner */ = {
			isa = PBXGroup;
			children = (
				97C146FA1CF9000F007C117D /* Main.storyboard */,
				97C146FD1CF9000F007C117D /* Assets.xcassets */,
				97C146FF1CF9000F007C117D /* LaunchScreen.storyboard */,
				97C147021CF9000F007C117D /* Info.plist */,
				1498D2321E8E86230040F4C2 /* GeneratedPluginRegistrant.h */,
				1498D2331E8E89220040F4C2 /* GeneratedPluginRegistrant.m */,
				74858FAE1ED2DC5600515810 /* AppDelegate.swift */,
				74858FAD1ED2DC5600515810 /* Runner-Bridging-Header.h */,
			);
			path = Runner;
			sourceTree = "<group>";
		};
		BDAF68A17B0E5205A5813592 /* Frameworks */ = {
			isa = PBXGroup;
			children = (
				9E1187B4A5C75EEA06C9F271 /* Pods_Runner.framework */,
				3CB28A3E128E29A157E09664 /* Pods_RunnerTests.framework */,
			);
			name = Frameworks;
			sourceTree = "<group>";
		};
/* End PBXGroup section */

/* Begin PBXNativeTarget section */
		331C8080294A63A400263BE5 /* RunnerTests */ = {
			isa = PBXNativeTarget;
			buildConfigurationList = 331C8087294A63A400263BE5 /* Build configuration list for PBXNativeTarget "RunnerTests" */;
			buildPhases = (
				3A5484AA9983D77D1D18672C /* [CP] Check Pods Manifest.lock */,
				331C807D294A63A400263BE5 /* Sources */,
				331C807F294A63A400263BE5 /* Resources */,
				A9E43356DD9F64ADD0BD1417 /* Frameworks */,
			);
			buildRules = (
			);
			dependencies = (
				331C8086294A63A400263BE5 /* PBXTargetDependency */,
			);
			name = RunnerTests;
			productName = RunnerTests;
			productReference = 331C8081294A63A400263BE5 /* RunnerTests.xctest */;
			productType = "com.apple.product-type.bundle.unit-test";
		};
		97C146ED1CF9000F007C117D /* Runner */ = {
			isa = PBXNativeTarget;
			buildConfigurationList = 97C147051CF9000F007C117D /* Build configuration list for PBXNativeTarget "Runner" */;
			buildPhases = (
				E4B47E4D1D6EF101684F156C /* [CP] Check Pods Manifest.lock */,
				9740EEB61CF901F6004384FC /* Run Script */,
				97C146EA1CF9000F007C117D /* Sources */,
				97C146EB1CF9000F007C117D /* Frameworks */,
				97C146EC1CF9000F007C117D /* Resources */,
				9705A1C41CF9048500538489 /* Embed Frameworks */,
				3B06AD1E1E4923F5004D2608 /* Thin Binary */,
				399757031974BA9421307DCF /* [CP] Embed Pods Frameworks */,
			);
			buildRules = (
			);
			dependencies = (
			);
			name = Runner;
			productName = Runner;
			productReference = 97C146EE1CF9000F007C117D /* Runner.app */;
			productType = "com.apple.product-type.application";
		};
/* End PBXNativeTarget section */

/* Begin PBXProject section */
		97C146E61CF9000F007C117D /* Project object */ = {
			isa = PBXProject;
			attributes = {
				BuildIndependentTargetsInParallel = YES;
				LastUpgradeCheck = 1510;
				ORGANIZATIONNAME = "";
				TargetAttributes = {
					331C8080294A63A400263BE5 = {
						CreatedOnToolsVersion = 14.0;
						TestTargetID = 97C146ED1CF9000F007C117D;
					};
					97C146ED1CF9000F007C117D = {
						CreatedOnToolsVersion = 7.3.1;
						LastSwiftMigration = 1100;
					};
				};
			};
			buildConfigurationList = 97C146E91CF9000F007C117D /* Build configuration list for PBXProject "Runner" */;
			compatibilityVersion = "Xcode 9.3";
			developmentRegion = en;
			hasScannedForEncodings = 0;
			knownRegions = (
				en,
				Base,
			);
			mainGroup = 97C146E51CF9000F007C117D;
			productRefGroup = 97C146EF1CF9000F007C117D /* Products */;
			projectDirPath = "";
			projectRoot = "";
			targets = (
				97C146ED1CF9000F007C117D /* Runner */,
				331C8080294A63A400263BE5 /* RunnerTests */,
			);
		};
/* End PBXProject section */

/* Begin PBXResourcesBuildPhase section */
		331C807F294A63A400263BE5 /* Resources */ = {
			isa = PBXResourcesBuildPhase;
			buildActionMask = 2147483647;
			files = (
			);
			runOnlyForDeploymentPostprocessing = 0;
		};
		97C146EC1CF9000F007C117D /* Resources */ = {
			isa = PBXResourcesBuildPhase;
			buildActionMask = 2147483647;
			files = (
				97C147011CF9000F007C117D /* LaunchScreen.storyboard in Resources */,
				3B3967161E833CAA004F5970 /* AppFrameworkInfo.plist in Resources */,
				97C146FE1CF9000F007C117D /* Assets.xcassets in Resources */,
				97C146FC1CF9000F007C117D /* Main.storyboard in Resources */,
			);
			runOnlyForDeploymentPostprocessing = 0;
		};
/* End PBXResourcesBuildPhase section */

/* Begin PBXShellScriptBuildPhase section */
		399757031974BA9421307DCF /* [CP] Embed Pods Frameworks */ = {
			isa = PBXShellScriptBuildPhase;
			buildActionMask = 2147483647;
			files = (
			);
			inputFileListPaths = (
				"${PODS_ROOT}/Target Support Files/Pods-Runner/Pods-Runner-frameworks-${CONFIGURATION}-input-files.xcfilelist",
			);
			name = "[CP] Embed Pods Frameworks";
			outputFileListPaths = (
				"${PODS_ROOT}/Target Support Files/Pods-Runner/Pods-Runner-frameworks-${CONFIGURATION}-output-files.xcfilelist",
			);
			runOnlyForDeploymentPostprocessing = 0;
			shellPath = /bin/sh;
			shellScript = "\"${PODS_ROOT}/Target Support Files/Pods-Runner/Pods-Runner-frameworks.sh\"\n";
			showEnvVarsInLog = 0;
		};
		3A5484AA9983D77D1D18672C /* [CP] Check Pods Manifest.lock */ = {
			isa = PBXShellScriptBuildPhase;
			buildActionMask = 2147483647;
			files = (
			);
			inputFileListPaths = (
			);
			inputPaths = (
				"${PODS_PODFILE_DIR_PATH}/Podfile.lock",
				"${PODS_ROOT}/Manifest.lock",
			);
			name = "[CP] Check Pods Manifest.lock";
			outputFileListPaths = (
			);
			outputPaths = (
				"$(DERIVED_FILE_DIR)/Pods-RunnerTests-checkManifestLockResult.txt",
			);
			runOnlyForDeploymentPostprocessing = 0;
			shellPath = /bin/sh;
			shellScript = "diff \"${PODS_PODFILE_DIR_PATH}/Podfile.lock\" \"${PODS_ROOT}/Manifest.lock\" > /dev/null\nif [ $? != 0 ] ; then\n    # print error to STDERR\n    echo \"error: The sandbox is not in sync with the Podfile.lock. Run 'pod install' or update your CocoaPods installation.\" >&2\n    exit 1\nfi\n# This output is used by Xcode 'outputs' to avoid re-running this script phase.\necho \"SUCCESS\" > \"${SCRIPT_OUTPUT_FILE_0}\"\n";
			showEnvVarsInLog = 0;
		};
		3B06AD1E1E4923F5004D2608 /* Thin Binary */ = {
			isa = PBXShellScriptBuildPhase;
			alwaysOutOfDate = 1;
			buildActionMask = 2147483647;
			files = (
			);
			inputPaths = (
				"${TARGET_BUILD_DIR}/${INFOPLIST_PATH}",
			);
			name = "Thin Binary";
			outputPaths = (
			);
			runOnlyForDeploymentPostprocessing = 0;
			shellPath = /bin/sh;
			shellScript = "/bin/sh \"$FLUTTER_ROOT/packages/flutter_tools/bin/xcode_backend.sh\" embed_and_thin";
		};
		9740EEB61CF901F6004384FC /* Run Script */ = {
			isa = PBXShellScriptBuildPhase;
			alwaysOutOfDate = 1;
			buildActionMask = 2147483647;
			files = (
			);
			inputPaths = (
			);
			name = "Run Script";
			outputPaths = (
			);
			runOnlyForDeploymentPostprocessing = 0;
			shellPath = /bin/sh;
			shellScript = "/bin/sh \"$FLUTTER_ROOT/packages/flutter_tools/bin/xcode_backend.sh\" build";
		};
		E4B47E4D1D6EF101684F156C /* [CP] Check Pods Manifest.lock */ = {
			isa = PBXShellScriptBuildPhase;
			buildActionMask = 2147483647;
			files = (
			);
			inputFileListPaths = (
			);
			inputPaths = (
				"${PODS_PODFILE_DIR_PATH}/Podfile.lock",
				"${PODS_ROOT}/Manifest.lock",
			);
			name = "[CP] Check Pods Manifest.lock";
			outputFileListPaths = (
			);
			outputPaths = (
				"$(DERIVED_FILE_DIR)/Pods-Runner-checkManifestLockResult.txt",
			);
			runOnlyForDeploymentPostprocessing = 0;
			shellPath = /bin/sh;
			shellScript = "diff \"${PODS_PODFILE_DIR_PATH}/Podfile.lock\" \"${PODS_ROOT}/Manifest.lock\" > /dev/null\nif [ $? != 0 ] ; then\n    # print error to STDERR\n    echo \"error: The sandbox is not in sync with the Podfile.lock. Run 'pod install' or update your CocoaPods installation.\" >&2\n    exit 1\nfi\n# This output is used by Xcode 'outputs' to avoid re-running this script phase.\necho \"SUCCESS\" > \"${SCRIPT_OUTPUT_FILE_0}\"\n";
			showEnvVarsInLog = 0;
		};
/* End PBXShellScriptBuildPhase section */

/* Begin PBXSourcesBuildPhase section */
		331C807D294A63A400263BE5 /* Sources */ = {
			isa = PBXSourcesBuildPhase;
			buildActionMask = 2147483647;
			files = (
				331C808B294A63AB00263BE5 /* RunnerTests.swift in Sources */,
			);
			runOnlyForDeploymentPostprocessing = 0;
		};
		97C146EA1CF9000F007C117D /* Sources */ = {
			isa = PBXSourcesBuildPhase;
			buildActionMask = 2147483647;
			files = (
				74858FAF1ED2DC5600515810 /* AppDelegate.swift in Sources */,
				1498D2341E8E89220040F4C2 /* GeneratedPluginRegistrant.m in Sources */,
			);
			runOnlyForDeploymentPostprocessing = 0;
		};
/* End PBXSourcesBuildPhase section */

/* Begin PBXTargetDependency section */
		331C8086294A63A400263BE5 /* PBXTargetDependency */ = {
			isa = PBXTargetDependency;
			target = 97C146ED1CF9000F007C117D /* Runner */;
			targetProxy = 331C8085294A63A400263BE5 /* PBXContainerItemProxy */;
		};
/* End PBXTargetDependency section */

/* Begin PBXVariantGroup section */
		97C146FA1CF9000F007C117D /* Main.storyboard */ = {
			isa = PBXVariantGroup;
			children = (
				97C146FB1CF9000F007C117D /* Base */,
			);
			name = Main.storyboard;
			sourceTree = "<group>";
		};
		97C146FF1CF9000F007C117D /* LaunchScreen.storyboard */ = {
			isa = PBXVariantGroup;
			children = (
				97C147001CF9000F007C117D /* Base */,
			);
			name = LaunchScreen.storyboard;
			sourceTree = "<group>";
		};
/* End PBXVariantGroup section */

/* Begin XCBuildConfiguration section */
		249021D3217E4FDB00AE95B9 /* Profile */ = {
			isa = XCBuildConfiguration;
			buildSettings = {
				ALWAYS_SEARCH_USER_PATHS = NO;
				ASSETCATALOG_COMPILER_GENERATE_SWIFT_ASSET_SYMBOL_EXTENSIONS = YES;
				CLANG_ANALYZER_NONNULL = YES;
				CLANG_CXX_LANGUAGE_STANDARD = "gnu++0x";
				CLANG_CXX_LIBRARY = "libc++";
				CLANG_ENABLE_MODULES = YES;
				CLANG_ENABLE_OBJC_ARC = YES;
				CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES;
				CLANG_WARN_BOOL_CONVERSION = YES;
				CLANG_WARN_COMMA = YES;
				CLANG_WARN_CONSTANT_CONVERSION = YES;
				CLANG_WARN_DEPRECATED_OBJC_IMPLEMENTATIONS = YES;
				CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR;
				CLANG_WARN_EMPTY_BODY = YES;
				CLANG_WARN_ENUM_CONVERSION = YES;
				CLANG_WARN_INFINITE_RECURSION = YES;
				CLANG_WARN_INT_CONVERSION = YES;
				CLANG_WARN_NON_LITERAL_NULL_CONVERSION = YES;
				CLANG_WARN_OBJC_IMPLICIT_RETAIN_SELF = YES;
				CLANG_WARN_OBJC_LITERAL_CONVERSION = YES;
				CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR;
				CLANG_WARN_RANGE_LOOP_ANALYSIS = YES;
				CLANG_WARN_STRICT_PROTOTYPES = YES;
				CLANG_WARN_SUSPICIOUS_MOVE = YES;
				CLANG_WARN_UNREACHABLE_CODE = YES;
				CLANG_WARN__DUPLICATE_METHOD_MATCH = YES;
				"CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "iPhone Developer";
				COPY_PHASE_STRIP = NO;
				DEBUG_INFORMATION_FORMAT = "dwarf-with-dsym";
				ENABLE_NS_ASSERTIONS = NO;
				ENABLE_STRICT_OBJC_MSGSEND = YES;
				ENABLE_USER_SCRIPT_SANDBOXING = NO;
				GCC_C_LANGUAGE_STANDARD = gnu99;
				GCC_NO_COMMON_BLOCKS = YES;
				GCC_WARN_64_TO_32_BIT_CONVERSION = YES;
				GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR;
				GCC_WARN_UNDECLARED_SELECTOR = YES;
				GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE;
				GCC_WARN_UNUSED_FUNCTION = YES;
				GCC_WARN_UNUSED_VARIABLE = YES;
				IPHONEOS_DEPLOYMENT_TARGET = 13.0;
				MTL_ENABLE_DEBUG_INFO = NO;
				SDKROOT = iphoneos;
				SUPPORTED_PLATFORMS = iphoneos;
				TARGETED_DEVICE_FAMILY = "1,2";
				VALIDATE_PRODUCT = YES;
			};
			name = Profile;
		};
		249021D4217E4FDB00AE95B9 /* Profile */ = {
			isa = XCBuildConfiguration;
			baseConfigurationReference = 7AFA3C8E1D35360C0083082E /* Release.xcconfig */;
			buildSettings = {
				ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon;
				CLANG_ENABLE_MODULES = YES;
				CURRENT_PROJECT_VERSION = "$(FLUTTER_BUILD_NUMBER)";
				DEVELOPMENT_TEAM = QNW744Q794;
				ENABLE_BITCODE = NO;
				INFOPLIST_FILE = Runner/Info.plist;
				LD_RUNPATH_SEARCH_PATHS = (
					"$(inherited)",
					"@executable_path/Frameworks",
				);
				PRODUCT_BUNDLE_IDENTIFIER = com.example.eviExample;
				PRODUCT_NAME = "$(TARGET_NAME)";
				SWIFT_OBJC_BRIDGING_HEADER = "Runner/Runner-Bridging-Header.h";
				SWIFT_VERSION = 5.0;
				VERSIONING_SYSTEM = "apple-generic";
			};
			name = Profile;
		};
		331C8088294A63A400263BE5 /* Debug */ = {
			isa = XCBuildConfiguration;
			baseConfigurationReference = 08CD5310A37D5E4C76389779 /* Pods-RunnerTests.debug.xcconfig */;
			buildSettings = {
				BUNDLE_LOADER = "$(TEST_HOST)";
				CODE_SIGN_STYLE = Automatic;
				CURRENT_PROJECT_VERSION = 1;
				GENERATE_INFOPLIST_FILE = YES;
				MARKETING_VERSION = 1.0;
				PRODUCT_BUNDLE_IDENTIFIER = com.example.eviExample.RunnerTests;
				PRODUCT_NAME = "$(TARGET_NAME)";
				SWIFT_ACTIVE_COMPILATION_CONDITIONS = DEBUG;
				SWIFT_OPTIMIZATION_LEVEL = "-Onone";
				SWIFT_VERSION = 5.0;
				TEST_HOST = "$(BUILT_PRODUCTS_DIR)/Runner.app/$(BUNDLE_EXECUTABLE_FOLDER_PATH)/Runner";
			};
			name = Debug;
		};
		331C8089294A63A400263BE5 /* Release */ = {
			isa = XCBuildConfiguration;
			baseConfigurationReference = E65E8B38E3E3EC1E0351A6F2 /* Pods-RunnerTests.release.xcconfig */;
			buildSettings = {
				BUNDLE_LOADER = "$(TEST_HOST)";
				CODE_SIGN_STYLE = Automatic;
				CURRENT_PROJECT_VERSION = 1;
				GENERATE_INFOPLIST_FILE = YES;
				MARKETING_VERSION = 1.0;
				PRODUCT_BUNDLE_IDENTIFIER = com.example.eviExample.RunnerTests;
				PRODUCT_NAME = "$(TARGET_NAME)";
				SWIFT_VERSION = 5.0;
				TEST_HOST = "$(BUILT_PRODUCTS_DIR)/Runner.app/$(BUNDLE_EXECUTABLE_FOLDER_PATH)/Runner";
			};
			name = Release;
		};
		331C808A294A63A400263BE5 /* Profile */ = {
			isa = XCBuildConfiguration;
			baseConfigurationReference = E6FCCA409E121E10B5878E69 /* Pods-RunnerTests.profile.xcconfig */;
			buildSettings = {
				BUNDLE_LOADER = "$(TEST_HOST)";
				CODE_SIGN_STYLE = Automatic;
				CURRENT_PROJECT_VERSION = 1;
				GENERATE_INFOPLIST_FILE = YES;
				MARKETING_VERSION = 1.0;
				PRODUCT_BUNDLE_IDENTIFIER = com.example.eviExample.RunnerTests;
				PRODUCT_NAME = "$(TARGET_NAME)";
				SWIFT_VERSION = 5.0;
				TEST_HOST = "$(BUILT_PRODUCTS_DIR)/Runner.app/$(BUNDLE_EXECUTABLE_FOLDER_PATH)/Runner";
			};
			name = Profile;
		};
		97C147031CF9000F007C117D /* Debug */ = {
			isa = XCBuildConfiguration;
			buildSettings = {
				ALWAYS_SEARCH_USER_PATHS = NO;
				ASSETCATALOG_COMPILER_GENERATE_SWIFT_ASSET_SYMBOL_EXTENSIONS = YES;
				CLANG_ANALYZER_NONNULL = YES;
				CLANG_CXX_LANGUAGE_STANDARD = "gnu++0x";
				CLANG_CXX_LIBRARY = "libc++";
				CLANG_ENABLE_MODULES = YES;
				CLANG_ENABLE_OBJC_ARC = YES;
				CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES;
				CLANG_WARN_BOOL_CONVERSION = YES;
				CLANG_WARN_COMMA = YES;
				CLANG_WARN_CONSTANT_CONVERSION = YES;
				CLANG_WARN_DEPRECATED_OBJC_IMPLEMENTATIONS = YES;
				CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR;
				CLANG_WARN_EMPTY_BODY = YES;
				CLANG_WARN_ENUM_CONVERSION = YES;
				CLANG_WARN_INFINITE_RECURSION = YES;
				CLANG_WARN_INT_CONVERSION = YES;
				CLANG_WARN_NON_LITERAL_NULL_CONVERSION = YES;
				CLANG_WARN_OBJC_IMPLICIT_RETAIN_SELF = YES;
				CLANG_WARN_OBJC_LITERAL_CONVERSION = YES;
				CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR;
				CLANG_WARN_RANGE_LOOP_ANALYSIS = YES;
				CLANG_WARN_STRICT_PROTOTYPES = YES;
				CLANG_WARN_SUSPICIOUS_MOVE = YES;
				CLANG_WARN_UNREACHABLE_CODE = YES;
				CLANG_WARN__DUPLICATE_METHOD_MATCH = YES;
				"CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "iPhone Developer";
				COPY_PHASE_STRIP = NO;
				DEBUG_INFORMATION_FORMAT = dwarf;
				ENABLE_STRICT_OBJC_MSGSEND = YES;
				ENABLE_TESTABILITY = YES;
				ENABLE_USER_SCRIPT_SANDBOXING = NO;
				GCC_C_LANGUAGE_STANDARD = gnu99;
				GCC_DYNAMIC_NO_PIC = NO;
				GCC_NO_COMMON_BLOCKS = YES;
				GCC_OPTIMIZATION_LEVEL = 0;
				GCC_PREPROCESSOR_DEFINITIONS = (
					"DEBUG=1",
					"$(inherited)",
				);
				GCC_WARN_64_TO_32_BIT_CONVERSION = YES;
				GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR;
				GCC_WARN_UNDECLARED_SELECTOR = YES;
				GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE;
				GCC_WARN_UNUSED_FUNCTION = YES;
				GCC_WARN_UNUSED_VARIABLE = YES;
				IPHONEOS_DEPLOYMENT_TARGET = 13.0;
				MTL_ENABLE_DEBUG_INFO = YES;
				ONLY_ACTIVE_ARCH = YES;
				SDKROOT = iphoneos;
				TARGETED_DEVICE_FAMILY = "1,2";
			};
			name = Debug;
		};
		97C147041CF9000F007C117D /* Release */ = {
			isa = XCBuildConfiguration;
			buildSettings = {
				ALWAYS_SEARCH_USER_PATHS = NO;
				ASSETCATALOG_COMPILER_GENERATE_SWIFT_ASSET_SYMBOL_EXTENSIONS = YES;
				CLANG_ANALYZER_NONNULL = YES;
				CLANG_CXX_LANGUAGE_STANDARD = "gnu++0x";
				CLANG_CXX_LIBRARY = "libc++";
				CLANG_ENABLE_MODULES = YES;
				CLANG_ENABLE_OBJC_ARC = YES;
				CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES;
				CLANG_WARN_BOOL_CONVERSION = YES;
				CLANG_WARN_COMMA = YES;
				CLANG_WARN_CONSTANT_CONVERSION = YES;
				CLANG_WARN_DEPRECATED_OBJC_IMPLEMENTATIONS = YES;
				CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR;
				CLANG_WARN_EMPTY_BODY = YES;
				CLANG_WARN_ENUM_CONVERSION = YES;
				CLANG_WARN_INFINITE_RECURSION = YES;
				CLANG_WARN_INT_CONVERSION = YES;
				CLANG_WARN_NON_LITERAL_NULL_CONVERSION = YES;
				CLANG_WARN_OBJC_IMPLICIT_RETAIN_SELF = YES;
				CLANG_WARN_OBJC_LITERAL_CONVERSION = YES;
				CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR;
				CLANG_WARN_RANGE_LOOP_ANALYSIS = YES;
				CLANG_WARN_STRICT_PROTOTYPES = YES;
				CLANG_WARN_SUSPICIOUS_MOVE = YES;
				CLANG_WARN_UNREACHABLE_CODE = YES;
				CLANG_WARN__DUPLICATE_METHOD_MATCH = YES;
				"CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "iPhone Developer";
				COPY_PHASE_STRIP = NO;
				DEBUG_INFORMATION_FORMAT = "dwarf-with-dsym";
				ENABLE_NS_ASSERTIONS = NO;
				ENABLE_STRICT_OBJC_MSGSEND = YES;
				ENABLE_USER_SCRIPT_SANDBOXING = NO;
				GCC_C_LANGUAGE_STANDARD = gnu99;
				GCC_NO_COMMON_BLOCKS = YES;
				GCC_WARN_64_TO_32_BIT_CONVERSION = YES;
				GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR;
				GCC_WARN_UNDECLARED_SELECTOR = YES;
				GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE;
				GCC_WARN_UNUSED_FUNCTION = YES;
				GCC_WARN_UNUSED_VARIABLE = YES;
				IPHONEOS_DEPLOYMENT_TARGET = 13.0;
				MTL_ENABLE_DEBUG_INFO = NO;
				SDKROOT = iphoneos;
				SUPPORTED_PLATFORMS = iphoneos;
				SWIFT_COMPILATION_MODE = wholemodule;
				SWIFT_OPTIMIZATION_LEVEL = "-O";
				TARGETED_DEVICE_FAMILY = "1,2";
				VALIDATE_PRODUCT = YES;
			};
			name = Release;
		};
		97C147061CF9000F007C117D /* Debug */ = {
			isa = XCBuildConfiguration;
			baseConfigurationReference = 9740EEB21CF90195004384FC /* Debug.xcconfig */;
			buildSettings = {
				ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon;
				CLANG_ENABLE_MODULES = YES;
				CURRENT_PROJECT_VERSION = "$(FLUTTER_BUILD_NUMBER)";
				DEVELOPMENT_TEAM = QNW744Q794;
				ENABLE_BITCODE = NO;
				INFOPLIST_FILE = Runner/Info.plist;
				LD_RUNPATH_SEARCH_PATHS = (
					"$(inherited)",
					"@executable_path/Frameworks",
				);
				PRODUCT_BUNDLE_IDENTIFIER = com.example.eviExample;
				PRODUCT_NAME = "$(TARGET_NAME)";
				SWIFT_OBJC_BRIDGING_HEADER = "Runner/Runner-Bridging-Header.h";
				SWIFT_OPTIMIZATION_LEVEL = "-Onone";
				SWIFT_VERSION = 5.0;
				VERSIONING_SYSTEM = "apple-generic";
			};
			name = Debug;
		};
		97C147071CF9000F007C117D /* Release */ = {
			isa = XCBuildConfiguration;
			baseConfigurationReference = 7AFA3C8E1D35360C0083082E /* Release.xcconfig */;
			buildSettings = {
				ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon;
				CLANG_ENABLE_MODULES = YES;
				CURRENT_PROJECT_VERSION = "$(FLUTTER_BUILD_NUMBER)";
				DEVELOPMENT_TEAM = QNW744Q794;
				ENABLE_BITCODE = NO;
				INFOPLIST_FILE = Runner/Info.plist;
				LD_RUNPATH_SEARCH_PATHS = (
					"$(inherited)",
					"@executable_path/Frameworks",
				);
				PRODUCT_BUNDLE_IDENTIFIER = com.example.eviExample;
				PRODUCT_NAME = "$(TARGET_NAME)";
				SWIFT_OBJC_BRIDGING_HEADER = "Runner/Runner-Bridging-Header.h";
				SWIFT_VERSION = 5.0;
				VERSIONING_SYSTEM = "apple-generic";
			};
			name = Release;
		};
/* End XCBuildConfiguration section */

/* Begin XCConfigurationList section */
		331C8087294A63A400263BE5 /* Build configuration list for PBXNativeTarget "RunnerTests" */ = {
			isa = XCConfigurationList;
			buildConfigurations = (
				331C8088294A63A400263BE5 /* Debug */,
				331C8089294A63A400263BE5 /* Release */,
				331C808A294A63A400263BE5 /* Profile */,
			);
			defaultConfigurationIsVisible = 0;
			defaultConfigurationName = Release;
		};
		97C146E91CF9000F007C117D /* Build configuration list for PBXProject "Runner" */ = {
			isa = XCConfigurationList;
			buildConfigurations = (
				97C147031CF9000F007C117D /* Debug */,
				97C147041CF9000F007C117D /* Release */,
				249021D3217E4FDB00AE95B9 /* Profile */,
			);
			defaultConfigurationIsVisible = 0;
			defaultConfigurationName = Release;
		};
		97C147051CF9000F007C117D /* Build configuration list for PBXNativeTarget "Runner" */ = {
			isa = XCConfigurationList;
			buildConfigurations = (
				97C147061CF9000F007C117D /* Debug */,
				97C147071CF9000F007C117D /* Release */,
				249021D4217E4FDB00AE95B9 /* Profile */,
			);
			defaultConfigurationIsVisible = 0;
			defaultConfigurationName = Release;
		};
/* End XCConfigurationList section */
	};
	rootObject = 97C146E61CF9000F007C117D /* Project object */;
}


================================================
FILE: evi/evi-flutter/ios/Runner.xcodeproj/project.xcworkspace/contents.xcworkspacedata
================================================
<?xml version="1.0" encoding="UTF-8"?>
<Workspace
   version = "1.0">
   <FileRef
      location = "self:">
   </FileRef>
</Workspace>


================================================
FILE: evi/evi-flutter/ios/Runner.xcodeproj/project.xcworkspace/xcshareddata/IDEWorkspaceChecks.plist
================================================
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
	<key>IDEDidComputeMac32BitWarning</key>
	<true/>
</dict>
</plist>


================================================
FILE: evi/evi-flutter/ios/Runner.xcodeproj/project.xcworkspace/xcshareddata/WorkspaceSettings.xcsettings
================================================
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
	<key>PreviewsEnabled</key>
	<false/>
</dict>
</plist>


================================================
FILE: evi/evi-flutter/ios/Runner.xcodeproj/xcshareddata/xcschemes/Runner.xcscheme
================================================
<?xml version="1.0" encoding="UTF-8"?>
<Scheme
   LastUpgradeVersion = "1510"
   version = "1.3">
   <BuildAction
      parallelizeBuildables = "YES"
      buildImplicitDependencies = "YES">
      <BuildActionEntries>
         <BuildActionEntry
            buildForTesting = "YES"
            buildForRunning = "YES"
            buildForProfiling = "YES"
            buildForArchiving = "YES"
            buildForAnalyzing = "YES">
            <BuildableReference
               BuildableIdentifier = "primary"
               BlueprintIdentifier = "97C146ED1CF9000F007C117D"
               BuildableName = "Runner.app"
               BlueprintName = "Runner"
               ReferencedContainer = "container:Runner.xcodeproj">
            </BuildableReference>
         </BuildActionEntry>
      </BuildActionEntries>
   </BuildAction>
   <TestAction
      buildConfiguration = "Debug"
      selectedDebuggerIdentifier = "Xcode.DebuggerFoundation.Debugger.LLDB"
      selectedLauncherIdentifier = "Xcode.DebuggerFoundation.Launcher.LLDB"
      shouldUseLaunchSchemeArgsEnv = "YES">
      <MacroExpansion>
         <BuildableReference
            BuildableIdentifier = "primary"
            BlueprintIdentifier = "97C146ED1CF9000F007C117D"
            BuildableName = "Runner.app"
            BlueprintName = "Runner"
            ReferencedContainer = "container:Runner.xcodeproj">
         </BuildableReference>
      </MacroExpansion>
      <Testables>
         <TestableReference
            skipped = "NO"
            parallelizable = "YES">
            <BuildableReference
               BuildableIdentifier = "primary"
               BlueprintIdentifier = "331C8080294A63A400263BE5"
               BuildableName = "RunnerTests.xctest"
               BlueprintName = "RunnerTests"
               ReferencedContainer = "container:Runner.xcodeproj">
            </BuildableReference>
         </TestableReference>
      </Testables>
   </TestAction>
   <LaunchAction
      buildConfiguration = "Debug"
      selectedDebuggerIdentifier = "Xcode.DebuggerFoundation.Debugger.LLDB"
      selectedLauncherIdentifier = "Xcode.DebuggerFoundation.Launcher.LLDB"
      launchStyle = "0"
      useCustomWorkingDirectory = "NO"
      ignoresPersistentStateOnLaunch = "NO"
      debugDocumentVersioning = "YES"
      debugServiceExtension = "internal"
      allowLocationSimulation = "YES">
      <BuildableProductRunnable
         runnableDebuggingMode = "0">
         <BuildableReference
            BuildableIdentifier = "primary"
            BlueprintIdentifier = "97C146ED1CF9000F007C117D"
            BuildableName = "Runner.app"
            BlueprintName = "Runner"
            ReferencedContainer = "container:Runner.xcodeproj">
         </BuildableReference>
      </BuildableProductRunnable>
   </LaunchAction>
   <ProfileAction
      buildConfiguration = "Profile"
      shouldUseLaunchSchemeArgsEnv = "YES"
      savedToolIdentifier = ""
      useCustomWorkingDirectory = "NO"
      debugDocumentVersioning = "YES">
      <BuildableProductRunnable
         runnableDebuggingMode = "0">
         <BuildableReference
            BuildableIdentifier = "primary"
            BlueprintIdentifier = "97C146ED1CF9000F007C117D"
            BuildableName = "Runner.app"
            BlueprintName = "Runner"
            ReferencedContainer = "container:Runner.xcodeproj">
         </BuildableReference>
      </BuildableProductRunnable>
   </ProfileAction>
   <AnalyzeAction
      buildConfiguration = "Debug">
   </AnalyzeAction>
   <ArchiveAction
      buildConfiguration = "Release"
      revealArchiveInOrganizer = "YES">
   </ArchiveAction>
</Scheme>


================================================
FILE: evi/evi-flutter/ios/Runner.xcworkspace/contents.xcworkspacedata
================================================
<?xml version="1.0" encoding="UTF-8"?>
<Workspace
   version = "1.0">
   <FileRef
      location = "group:Runner.xcodeproj">
   </FileRef>
   <FileRef
      location = "group:Pods/Pods.xcodeproj">
   </FileRef>
</Workspace>


================================================
FILE: evi/evi-flutter/ios/Runner.xcworkspace/xcshareddata/IDEWorkspaceChecks.plist
================================================
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
	<key>IDEDidComputeMac32BitWarning</key>
	<true/>
</dict>
</plist>


================================================
FILE: evi/evi-flutter/ios/Runner.xcworkspace/xcshareddata/WorkspaceSettings.xcsettings
================================================
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
	<key>PreviewsEnabled</key>
	<false/>
</dict>
</plist>


================================================
FILE: evi/evi-flutter/ios/RunnerTests/RunnerTests.swift
================================================
import Flutter
import UIKit
import XCTest

/// Placeholder test suite for the iOS Runner host app.
/// Flutter-side logic is tested in Dart (see test/widget_test.dart); this
/// suite exists for native (Swift) host-app tests only.
class RunnerTests: XCTestCase {

  /// Intentionally empty example test — replace with real host-app tests.
  func testExample() {
    // If you add code to the Runner application, consider adding tests here.
    // See https://developer.apple.com/documentation/xctest for more information about using XCTest.
  }

}


================================================
FILE: evi/evi-flutter/ios/build/ios/XCBuildData/PIFCache/workspace/WORKSPACE@v11_hash=(null)_subobjects=4483c0dac1d2a63621e8a5d74e580a19-json
================================================
{"guid":"dc4b70c03e8043e50e38f2068887b1d4","name":"Pods","path":"/Users/twitchard/dev/hume-api-examples/evi-flutter-example/ios/Pods/Pods.xcodeproj/project.xcworkspace","projects":["PROJECT@v11_mod=1737593281.485423_hash=bfdfe7dc352907fc980b868725387e98plugins=1OJSG6M1FOV3XYQCBH7Z29RZ0FPR9XDE1"]}

================================================
FILE: evi/evi-flutter/lib/chat_card.dart
================================================
import 'dart:core';

import 'package:flutter/material.dart';
import 'theme.dart';

/// Who produced a chat entry: the human user or the EVI assistant.
enum Role { user, assistant }

/// A single emotion inference: the emotion's name and its confidence score.
class Score {
  final String emotion;
  final double score;

  Score({required this.emotion, required this.score});

  /// Serializes this score as a plain JSON-compatible map.
  Map<String, dynamic> toJson() => {
        'emotion': emotion,
        'score': score,
      };
}

/// One row of the transcript: who spoke, when, what was said, and the
/// top emotion scores attached to that utterance.
class ChatEntry {
  final Role role;
  final String timestamp;
  final String content;
  final List<Score> scores;

  ChatEntry({
    required this.role,
    required this.timestamp,
    required this.content,
    required this.scores,
  });
}

/// Renders one chat entry as a card bubble: user messages on the right in
/// blue, assistant messages on the left in white, with an emotion summary
/// line underneath the transcript text.
class ChatCard extends StatelessWidget {
  final ChatEntry message;
  const ChatCard({super.key, required this.message});

  @override
  Widget build(BuildContext context) {
    final isUser = message.role == Role.user;

    // Comma-separated "Emotion (score)" summary shown under the transcript.
    final emotionSummary = message.scores
        .map((score) => "${score.emotion} (${score.score.toStringAsFixed(1)})")
        .join(", ");

    final bubble = Card(
      elevation: 2,
      color: isUser ? accentBlue200 : white,
      shape: RoundedRectangleBorder(
        borderRadius: BorderRadius.circular(8.0),
      ),
      child: Padding(
        padding: const EdgeInsets.all(12.0),
        child: Column(
          crossAxisAlignment:
              isUser ? CrossAxisAlignment.end : CrossAxisAlignment.start,
          children: [
            Text(
              message.content,
              style: TextStyle(fontSize: 16),
            ),
            const SizedBox(height: 8),
            Text(
              emotionSummary,
              style: TextStyle(
                fontSize: 12,
                color: Colors.grey[600],
              ),
            ),
          ],
        ),
      ),
    );

    return Padding(
      padding: const EdgeInsets.symmetric(vertical: 8.0, horizontal: 16.0),
      child: Align(
        alignment: isUser ? Alignment.centerRight : Alignment.centerLeft,
        child: bubble,
      ),
    );
  }
}

/// Scrollable transcript view: lazily builds one [ChatCard] per entry.
class ChatDisplay extends StatelessWidget {
  final List<ChatEntry> entries;
  const ChatDisplay({super.key, required this.entries});

  @override
  Widget build(BuildContext context) {
    return Padding(
      padding: const EdgeInsets.all(16.0),
      child: ListView.builder(
        itemCount: entries.length,
        itemBuilder: (_, i) => ChatCard(message: entries[i]),
      ),
    );
  }
}


================================================
FILE: evi/evi-flutter/lib/evi_message.dart
================================================
import 'dart:convert';

// Represents an incoming message sent from the /v0/evi/chat websocket endpoint of
// the Hume API. This example includes only messages and properties that are used in the example.
// You should add more messages and properties to this datatype as needed.
// See https://hume.docs.buildwithfern.com/reference/empathic-voice-interface-evi/chat/chat#receive
// for the full list of messages and their properties.
//
// You can also use the Typescript SDK as a useful reference:
// https://github.com/HumeAI/hume-typescript-sdk/blob/da8820dfef2a30e0745a6ae86987b090a5ba0e6e/src/api/resources/empathicVoice/types/JsonMessage.ts#L7
sealed class EviMessage {
  // Discriminator copied from the incoming JSON's "type" field.
  final String type;
  // The full decoded payload, kept for fields subclasses don't extract.
  final Map<String, dynamic> rawJson;

  EviMessage._(this.type, this.rawJson);

  /// Decodes a raw websocket text frame into the matching message subclass.
  /// Unrecognized "type" values fall through to [UnknownMessage].
  factory EviMessage.decode(String text) {
    final Map<String, dynamic> json = jsonDecode(text);
    return switch (json['type'] as String) {
      'error' => ErrorMessage(json),
      'chat_metadata' => ChatMetadataMessage(json),
      'audio_output' => AudioOutputMessage(json),
      'user_interruption' => UserInterruptionMessage(json),
      'assistant_message' => AssistantMessage(json),
      'user_message' => UserMessage(json),
      _ => UnknownMessage(json),
    };
  }
}

/// An error reported by the EVI API.
/// Bug fix: the type tag passed to the superclass was 'chat_metadata'
/// (a copy/paste slip); an error message's type is 'error'.
class ErrorMessage extends EviMessage {
  // Human-readable error description from the server.
  final String message;
  ErrorMessage(json)
      : message = json['message'],
        super._('error', json);
}

/// Sent by EVI once the chat session is established; the example treats it
/// as the signal to configure audio settings and start recording (see
/// main.dart). Session details remain available in [rawJson].
class ChatMetadataMessage extends EviMessage {
  ChatMetadataMessage(json) : super._('chat_metadata', json);
}

/// A chunk of assistant speech audio for playback.
class AudioOutputMessage extends EviMessage {
  // Audio payload handed to the audio player; presumably base64-encoded —
  // TODO confirm against the EVI chat API reference.
  final String data;
  AudioOutputMessage(json)
      : data = json['data'],
        super._('audio_output', json);
}

/// Signals that the user interrupted the assistant; on receipt the app stops
/// audio playback (see the websocket listener in main.dart).
class UserInterruptionMessage extends EviMessage {
  UserInterruptionMessage(json) : super._('user_interruption', json);
}

/// The transcript portion of a user/assistant message: who spoke ("user" or
/// "assistant") and the transcribed text.
class ChatMessage {
  final String role;
  final String content;

  ChatMessage(json)
      : content = json['content'],
        role = json['role'];
}

/// Emotion scores from the prosody model: emotion name -> confidence.
/// Bug fix: `.cast<String, double>()` throws lazily when a score arrives as a
/// JSON integer (jsonDecode yields `int` for whole numbers); convert each
/// value through `num.toDouble()` instead.
class ProsodyInference {
  final Map<String, double> scores;
  ProsodyInference(json)
      : scores = (json['scores'] as Map).map(
            (key, value) => MapEntry(key as String, (value as num).toDouble()));
}

/// Container for model outputs attached to a message.
/// Bug fix: `prosody` is declared nullable, but the constructor previously
/// always built a ProsodyInference and crashed (`null['scores']`) when the
/// 'prosody' key was absent; honor the nullable contract instead.
class Inference {
  final ProsodyInference? prosody;
  Inference(json)
      : prosody =
            json['prosody'] == null ? null : ProsodyInference(json['prosody']);
}

/// Transcript of the assistant's speech plus emotion analysis under `models`.
class AssistantMessage extends EviMessage {
  final ChatMessage message;
  final Inference models;
  AssistantMessage(json)
      : message = ChatMessage(json['message']),
        models = Inference(json['models']),
        super._('assistant_message', json);
}

/// Transcript of the user's speech plus emotion analysis under `models`.
class UserMessage extends EviMessage {
  final ChatMessage message;
  final Inference models;
  UserMessage(json)
      : message = ChatMessage(json['message']),
        models = Inference(json['models']),
        super._('user_message', json);
}

/// Fallback for message types this example does not model; the full payload
/// stays available in [rawJson].
class UnknownMessage extends EviMessage {
  UnknownMessage(json) : super._(json['type'], json);
}


================================================
FILE: evi/evi-flutter/lib/main.dart
================================================
import 'dart:convert';

import 'package:flutter/material.dart';
import 'package:web_socket_channel/web_socket_channel.dart';
import 'package:http/http.dart' as http;
import 'package:flutter_dotenv/flutter_dotenv.dart';
import 'package:audio/audio.dart';

import 'theme.dart';
import 'chat_card.dart';
import 'evi_message.dart' as evi;

/// Process-wide singleton holding Hume credentials loaded from the bundled
/// .env file. Call [loadConfig] once before building the app.
class ConfigManager {
  static final ConfigManager _instance = ConfigManager._internal();

  String humeApiKey = "";
  String humeAccessToken = "";
  late final String humeConfigId;

  ConfigManager._internal();

  static ConfigManager get instance => _instance;

  // WARNING! For development only. In production, the app should hit your own
  // backend server to get an access token, using "token authentication"
  // (see https://dev.hume.ai/docs/introduction/api-key#token-authentication)
  String fetchHumeApiKey() => dotenv.env['HUME_API_KEY'] ?? "";

  /// Fetches an access token from your own backend server, whose URL is
  /// taken from MY_SERVER_AUTH_URL in the .env file.
  Future<String> fetchAccessToken() async {
    final authUrl = dotenv.env['MY_SERVER_AUTH_URL'];
    if (authUrl == null) {
      throw Exception('Please set MY_SERVER_AUTH_URL in your .env file');
    }
    final response = await http.get(Uri.parse(authUrl));
    if (response.statusCode != 200) {
      throw Exception('Failed to load access token');
    }
    return jsonDecode(response.body)['access_token'];
  }

  /// Loads credentials from .env into this singleton.
  /// Make sure to create a .env file in your root directory which mirrors the
  /// .env.example file and add your API key and an optional EVI config ID.
  Future<void> loadConfig() async {
    await dotenv.load();

    // WARNING! For development only.
    humeApiKey = fetchHumeApiKey();

    // Uncomment this to use an access token in production.
    // humeAccessToken = await fetchAccessToken();
    humeConfigId = dotenv.env['HUME_CONFIG_ID'] ?? '';
  }
}

/// App entry point: initializes Flutter, loads credentials, then runs the UI.
void main() async {
  // Ensure Flutter binding is initialized before calling asynchronous operations
  WidgetsFlutterBinding.ensureInitialized();

  // Load config in singleton (reads the bundled .env file) so MyApp.build can
  // check credentials synchronously.
  await ConfigManager.instance.loadConfig();

  runApp(MyApp());
}

/// Root widget. Shows an error screen when no Hume credentials are
/// configured; otherwise shows the chat home page.
class MyApp extends StatelessWidget {
  const MyApp({super.key});

  @override
  Widget build(BuildContext context) {
    final config = ConfigManager.instance;
    final hasCredentials =
        config.humeApiKey.isNotEmpty || config.humeAccessToken.isNotEmpty;

    if (!hasCredentials) {
      return MaterialApp(
          title: 'Flutter with EVI',
          home: ErrorMessage(
            message:
                "Error: Please set your Hume API key in main.dart (or use fetchAccessToken)",
          ),
          theme: appTheme);
    }
    return MaterialApp(
      title: 'Flutter with EVI',
      home: MyHomePage(title: 'Flutter with EVI'),
      theme: appTheme,
    );
  }

  /// Returns the three highest-scoring emotions from the prosody model,
  /// sorted in descending score order.
  static List<Score> extractTopThreeEmotions(evi.Inference models) {
    // Copy the (emotion -> score) entries and sort descending by score.
    final ranked = (models.prosody?.scores ?? {}).entries.toList()
      ..sort((x, y) => y.value.compareTo(x.value));

    // Keep only the top three, converted to the UI's Score type.
    return [
      for (final entry in ranked.take(3))
        Score(emotion: entry.key, score: entry.value)
    ];
  }
}

/// Full-screen centered error text, used when credentials are missing.
class ErrorMessage extends StatelessWidget {
  final String message;

  const ErrorMessage({super.key, required this.message});

  @override
  Widget build(BuildContext context) {
    final headlineStyle = Theme.of(context).textTheme.headlineLarge;
    return Center(
      child: Text(message, style: headlineStyle),
    );
  }
}

/// Root screen of the app: connection status, chat transcript, and
/// connect/mute controls (all state lives in _MyHomePageState).
class MyHomePage extends StatefulWidget {
  // Title shown in the app bar.
  final String title;

  const MyHomePage({super.key, required this.title});

  @override
  State<MyHomePage> createState() => _MyHomePageState();
}

class _MyHomePageState extends State<MyHomePage> {
  // Microphone capture and speaker playback, provided by the local `audio` package.
  final Audio _audio = Audio();
  // Websocket to the EVI chat endpoint; null until _connect() runs.
  WebSocketChannel? _chatChannel;
  // Drives the Connect/Disconnect button and the status text.
  bool _isConnected = false;
  // Drives the Mute/Unmute button.
  bool _isMuted = false;
  // Transcript entries rendered by ChatDisplay.
  var chatEntries = <ChatEntry>[];

  // EVI sends back transcripts of both the user's and the assistant's speech,
  // along with an analysis of the emotional content of the speech. This method
  // takes a parsed message from EVI, turns it into a `ChatEntry`, and adds it
  // to `chatEntries` so it can be displayed.
  void appendNewChatMessage(evi.ChatMessage chatMessage, evi.Inference models) {
    final role = chatMessage.role == 'assistant' ? Role.assistant : Role.user;
    final entry = ChatEntry(
        role: role,
        timestamp: DateTime.now().toString(),
        content: chatMessage.content,
        scores: MyApp.extractTopThreeEmotions(models));
    setState(() {
      chatEntries.add(entry);
    });
  }

  @override
  Widget build(BuildContext context) {
    // The mute button only toggles microphone capture; playback is unaffected.
    final muteButton = _isMuted
        ? ElevatedButton(
            onPressed: _unmuteInput,
            child: const Text('Unmute'),
          )
        : ElevatedButton(
            onPressed: _muteInput,
            child: const Text('Mute'),
          );
    final connectButton = _isConnected
        ? ElevatedButton(
            onPressed: _disconnect,
            child: const Text('Disconnect'),
          )
        : ElevatedButton(
            onPressed: _connect,
            child: const Text('Connect'),
          );
    return Scaffold(
      appBar: AppBar(
        backgroundColor: Theme.of(context).colorScheme.inversePrimary,
        title: Text(widget.title),
      ),
      body: Center(
          child: ConstrainedBox(
              constraints: BoxConstraints(maxWidth: 600),
              child: Column(
                  mainAxisAlignment: MainAxisAlignment.center,
                  children: <Widget>[
                    Text(
                      'You are ${_isConnected ? 'connected' : 'disconnected'}',
                      style: const TextStyle(
                          fontSize: 18, fontWeight: FontWeight.bold),
                    ),
                    Expanded(child: ChatDisplay(entries: chatEntries)),
                    Padding(
                        padding: const EdgeInsets.all(8.0),
                        child: Row(
                            mainAxisAlignment: MainAxisAlignment.spaceEvenly,
                            children: <Widget>[connectButton, muteButton]))
                  ]))),
    );
  }

  @override
  void dispose() {
    // Release recorder/player resources when the screen goes away.
    _audio.dispose();
    super.dispose();
  }

  // Opens a websocket connection to the EVI API and registers a listener to handle
  // incoming messages.
  void _connect() {
    // NOTE(review): the UI is flipped to "connected" before the socket is
    // actually established; a connection failure is only reflected later via
    // onError/onDone. Confirm this is the intended UX.
    setState(() {
      _isConnected = true;
    });
    if (ConfigManager.instance.humeApiKey.isNotEmpty &&
        ConfigManager.instance.humeAccessToken.isNotEmpty) {
      throw Exception(
          'Please use either an API key or an access token, not both');
    }

    // Authenticate with an access token when available, otherwise fall back
    // to the development-only API key.
    var uri = 'wss://api.hume.ai/v0/evi/chat';
    if (ConfigManager.instance.humeAccessToken.isNotEmpty) {
      uri += '?access_token=${ConfigManager.instance.humeAccessToken}';
    } else if (ConfigManager.instance.humeApiKey.isNotEmpty) {
      uri += '?api_key=${ConfigManager.instance.humeApiKey}';
    } else {
      throw Exception('Please set your Hume API credentials in main.dart');
    }

    if (ConfigManager.instance.humeConfigId.isNotEmpty) {
      uri += "&config_id=${ConfigManager.instance.humeConfigId}";
    }

    _chatChannel = WebSocketChannel.connect(Uri.parse(uri));

    _chatChannel!.stream.listen(
      (event) async {
        final message = evi.EviMessage.decode(event);
        debugPrint("Received message: ${message.type}");
        // This message contains audio data for playback.
        switch (message) {
          case (evi.ErrorMessage errorMessage):
            debugPrint("Error: ${errorMessage.message}");
            break;
          case (evi.ChatMetadataMessage chatMetadataMessage):
            // Session is established: tell EVI our audio format, then start
            // streaming microphone input.
            debugPrint("Chat metadata: ${chatMetadataMessage.rawJson}");
            _prepareAudioSettings();
            _startRecording();
            break;
          case (evi.AudioOutputMessage audioOutputMessage):
            _audio.enqueueAudio(audioOutputMessage.data);
            break;
          case (evi.UserInterruptionMessage _):
            _handleInterruption();
            break;
          // These messages contain the transcript text of the user's or the assistant's speech
          // as well as emotional analysis of the speech.
          case (evi.AssistantMessage assistantMessage):
            appendNewChatMessage(
                assistantMessage.message, assistantMessage.models);
            break;
          case (evi.UserMessage userMessage):
            appendNewChatMessage(userMessage.message, userMessage.models);
            // The user spoke: cut off any assistant audio still playing.
            _handleInterruption();
            break;
          case (evi.UnknownMessage unknownMessage):
            debugPrint("Unknown message: ${unknownMessage.rawJson}");
            break;
        }
      },
      onError: (error) {
        debugPrint("Connection error: $error");
        _handleConnectionClosed();
      },
      onDone: () {
        debugPrint("Connection closed");
        _handleConnectionClosed();
      },
    );

    debugPrint("Connected");
  }

  void _disconnect() {
    // Stop recording/playback first, then close the socket.
    _handleConnectionClosed();
    _handleInterruption();
    _chatChannel?.sink.close();
    debugPrint("Disconnected");
  }


  // Shared teardown for both user-initiated disconnects and server-side closes.
  void _handleConnectionClosed() {
    setState(() {
      _isConnected = false;
    });
    _stopRecording();
  }

  // Discard any queued assistant audio immediately.
  void _handleInterruption() {
    _audio.stopPlayback();
  }

  void _muteInput() {
    _stopRecording();
    setState(() {
      _isMuted = true;
    });
  }

  void _prepareAudioSettings() {
    // set session settings to prepare EVI for receiving linear16 encoded audio
    // https://dev.hume.ai/docs/empathic-voice-interface-evi/configuration#session-settings
    _chatChannel!.sink.add(jsonEncode({
      'type': 'session_settings',
      'audio': {
        'encoding': 'linear16',
        'sample_rate': 48000,
        'channels': 1,
      },
    }));
  }

  // Forwards one captured audio chunk (already base64-encoded by the audio
  // package, per the audio_input message contract) to EVI.
  void _sendAudio(String base64) {
    _chatChannel!.sink.add(jsonEncode({
      'type': 'audio_input',
      'data': base64,
    }));
  }

  void _startRecording() async {
    await _audio.startRecording();

    // NOTE(review): each call adds a new listener to audioStream; after a
    // mute/unmute cycle this may double-send chunks unless audioStream is a
    // fresh stream per startRecording() — confirm against the audio package.
    _audio.audioStream.listen((data) async {
      _sendAudio(data);
    });
    // NOTE(review): handleError returns a NEW stream and does not attach to
    // the subscription above, so these errors are likely never observed;
    // consider an onError callback on listen() instead.
    _audio.audioStream.handleError((error) {
      debugPrint("Error recording audio: $error");
    });
  }

  void _stopRecording() {
    _audio.stopRecording();
  }

  void _unmuteInput() {
    _startRecording();
    setState(() {
      _isMuted = false;
    });
  }
}


================================================
FILE: evi/evi-flutter/lib/theme.dart
================================================
import 'package:flutter/material.dart';

// Brand palette, taken from CSS variables on hume.ai.
const Color white = Color.fromRGBO(255, 255, 255, 1);
const Color humeBlack900 = Color.fromRGBO(26, 26, 26, 1);
const Color humeTan400 = Color.fromRGBO(255, 244, 232, 1);
const Color accentOrange200 = Color.fromRGBO(255, 219, 176, 1);
const Color accentBlue200 = Color.fromRGBO(209, 226, 243, 1);

// App-wide theme: warm tan scaffold with Hume accent colors.
ThemeData appTheme = ThemeData(
  scaffoldBackgroundColor: humeTan400,
  colorScheme: ColorScheme.light(
    primary: white,
    inversePrimary: accentOrange200,
    surface: humeBlack900,
  ),
);


================================================
FILE: evi/evi-flutter/pubspec.yaml
================================================
name: evi_example
description: "A new Flutter project."
# The following line prevents the package from being accidentally published to
# pub.dev using `flutter pub publish`. This is preferred for private packages.
publish_to: 'none' # Remove this line if you wish to publish to pub.dev

# The following defines the version and build number for your application.
# A version number is three numbers separated by dots, like 1.2.43
# followed by an optional build number separated by a +.
# Both the version and the build number may be overridden in flutter
# build by specifying --build-name and --build-number, respectively.
# In Android, build-name is used as versionName while build-number is used as versionCode.
# Read more about Android versioning at https://developer.android.com/studio/publish/versioning
# In iOS, build-name is used as CFBundleShortVersionString while build-number is used as CFBundleVersion.
# Read more about iOS versioning at
# https://developer.apple.com/library/archive/documentation/General/Reference/InfoPlistKeyReference/Articles/CoreFoundationKeys.html
# In Windows, build-name is used as the major, minor, and patch parts
# of the product and file versions while build-number is used as the build suffix.
version: 1.0.0+1

environment:
  sdk: ^3.5.1

# Dependencies specify other packages that your package needs in order to work.
# To automatically upgrade your package dependencies to the latest versions
# consider running `flutter pub upgrade --major-versions`. Alternatively,
# dependencies can be manually updated by changing the version numbers below to
# the latest version available on pub.dev. To see which dependencies have newer
# versions available, run `flutter pub outdated`.
dependencies:
  flutter:
    sdk: flutter
    
  # Supports environment variables
  flutter_dotenv: ^5.2.1

  # The following adds the Cupertino Icons font to your application.
  # Use with the CupertinoIcons class for iOS style icons.
  cupertino_icons: ^1.0.8
  web_socket_channel: ^3.0.1
  record: ^5.1.2
  audio:
    path: ./audio
  http: ^1.2.2

dev_dependencies:
  flutter_test:
    sdk: flutter

  # The "flutter_lints" package below contains a set of recommended lints to
  # encourage good coding practices. The lint set provided by the package is
  # activated in the `analysis_options.yaml` file located at the root of your
  # package. See that file for information about deactivating specific lint
  # rules and activating additional ones.
  flutter_lints: ^5.0.0

# For information on the generic Dart part of this file, see the
# following page: https://dart.dev/tools/pub/pubspec

# The following section is specific to Flutter packages.
flutter:
  assets:
      - .env

  # The following line ensures that the Material Icons font is
  # included with your application, so that you can use the icons in
  # the material Icons class.
  uses-material-design: true

  # To add assets to your application, add an assets section, like this:
  # assets:
  #   - images/a_dot_burr.jpeg
  #   - images/a_dot_ham.jpeg

  # An image asset can refer to one or more resolution-specific "variants", see
  # https://flutter.dev/to/resolution-aware-images

  # For details regarding adding assets from package dependencies, see
  # https://flutter.dev/to/asset-from-package

  # To add custom fonts to your application, add a fonts section here,
  # in this "flutter" section. Each entry in this list should have a
  # "family" key with the font family name, and a "fonts" key with a
  # list giving the asset and other descriptors for the font. For
  # example:
  # fonts:
  #   - family: Schyler
  #     fonts:
  #       - asset: fonts/Schyler-Regular.ttf
  #       - asset: fonts/Schyler-Italic.ttf
  #         style: italic
  #   - family: Trajan Pro
  #     fonts:
  #       - asset: fonts/TrajanPro.ttf
  #       - asset: fonts/TrajanPro_Bold.ttf
  #         weight: 700
  #
  # For details regarding fonts from package dependencies,
  # see https://flutter.dev/to/font-from-package


================================================
FILE: evi/evi-flutter/test/widget_test.dart
================================================
// This is a basic Flutter widget test.
//
// To perform an interaction with a widget in your test, use the WidgetTester
// utility in the flutter_test package. For example, you can send tap and scroll
// gestures. You can also use WidgetTester to find child widgets in the widget
// tree, read text, and verify that the values of widget properties are correct.

import 'package:flutter/material.dart';
import 'package:flutter_test/flutter_test.dart';

import 'package:evi_example/main.dart';

void main() {
  // NOTE(review): this is the default Flutter counter smoke test; it assumes
  // MyApp renders a counter UI — verify against lib/main.dart.
  testWidgets('Counter increments smoke test', (WidgetTester tester) async {
    // Render the application widget tree and settle the first frame.
    await tester.pumpWidget(const MyApp());

    // The counter should begin at zero.
    expect(find.text('1'), findsNothing);
    expect(find.text('0'), findsOneWidget);

    // Simulate a tap on the increment icon, then rebuild one frame.
    await tester.tap(find.byIcon(Icons.add));
    await tester.pump();

    // The counter should now read one.
    expect(find.text('1'), findsOneWidget);
    expect(find.text('0'), findsNothing);
  });
}


================================================
FILE: evi/evi-flutter/web/index.html
================================================
<!DOCTYPE html>
<html>
<head>
  <!--
    If you are serving your web app in a path other than the root, change the
    href value below to reflect the base path you are serving from.

    The path provided below has to start and end with a slash "/" in order for
    it to work correctly.

    For more details:
    * https://developer.mozilla.org/en-US/docs/Web/HTML/Element/base

    This is a placeholder for base href that will be replaced by the value of
    the `--base-href` argument provided to `flutter build`.
  -->
  <base href="$FLUTTER_BASE_HREF">

  <meta charset="UTF-8">
  <meta content="IE=Edge" http-equiv="X-UA-Compatible">
  <meta name="description" content="A new Flutter project.">

  <!-- iOS meta tags & icons -->
  <meta name="apple-mobile-web-app-capable" content="yes">
  <meta name="apple-mobile-web-app-status-bar-style" content="black">
  <meta name="apple-mobile-web-app-title" content="evi_example">
  <link rel="apple-touch-icon" href="icons/Icon-192.png">

  <!-- Favicon -->
  <link rel="icon" type="image/png" href="favicon.png"/>

  <title>evi_example</title>
  <!-- PWA manifest (icons, theme colors, install metadata) — see web/manifest.json. -->
  <link rel="manifest" href="manifest.json">
</head>
<body>
  <!-- flutter_bootstrap.js is generated by `flutter build web`; it loads the
       Flutter engine and then the compiled application. -->
  <script src="flutter_bootstrap.js" async></script>
</body>
</html>


================================================
FILE: evi/evi-flutter/web/manifest.json
================================================
{
    "name": "evi_example",
    "short_name": "evi_example",
    "start_url": ".",
    "display": "standalone",
    "background_color": "#0175C2",
    "theme_color": "#0175C2",
    "description": "A new Flutter project.",
    "orientation": "portrait-primary",
    "prefer_related_applications": false,
    "icons": [
        {
            "src": "icons/Icon-192.png",
            "sizes": "192x192",
            "type": "image/png"
        },
        {
            "src": "icons/Icon-512.png",
            "sizes": "512x512",
            "type": "image/png"
        },
        {
            "src": "icons/Icon-maskable-192.png",
            "sizes": "192x192",
            "type": "image/png",
            "purpose": "maskable"
        },
        {
            "src": "icons/Icon-maskable-512.png",
            "sizes": "512x512",
            "type": "image/png",
            "purpose": "maskable"
        }
    ]
}


================================================
FILE: evi/evi-next-js-app-router-quickstart/.eslintrc.json
================================================
{
  "extends": "next/core-web-vitals"
}


================================================
FILE: evi/evi-next-js-app-router-quickstart/.gitignore
================================================
# See https://help.github.com/articles/ignoring-files/ for more about ignoring files.

# dependencies
/node_modules
/.pnp
.pnp.js
.yarn/install-state.gz

# testing
/coverage
/test-results/
/playwright-report/

# next.js
/.next/
/out/

# production
/build

# misc
.DS_Store
*.pem

# debug
npm-debug.log*
yarn-debug.log*
yarn-error.log*

# local env files
.env*.local
.env

# vercel
.vercel

# typescript
*.tsbuildinfo
next-env.d.ts


================================================
FILE: evi/evi-next-js-app-router-quickstart/.prettierrc.json
================================================
{}


================================================
FILE: evi/evi-next-js-app-router-quickstart/README.md
================================================
<div align="center">
  <img src="https://storage.googleapis.com/hume-public-logos/hume/hume-banner.png">
  <h1>Empathic Voice Interface | Next.js App Router Quickstart</h1>
</div>

![preview.png](preview.png)

## Overview

This project features a sample implementation of Hume's [Empathic Voice Interface](https://dev.hume.ai/docs/empathic-voice-interface-evi/overview) using Hume's [React SDK](https://github.com/HumeAI/empathic-voice-api-js/tree/main/packages/react). Here, we have a simple EVI that uses the Next.js App Router.

See the [Quickstart guide](https://dev.hume.ai/docs/empathic-voice-interface-evi/quickstart/nextjs) for a detailed explanation of the code in this project.

## Project deployment

Click the button below to deploy this example project with Vercel:

[![Deploy with Vercel](https://vercel.com/button)](https://vercel.com/new/clone?repository-url=https%3A%2F%2Fgithub.com%2Fhumeai%2Fhume-evi-next-js-starter&env=HUME_API_KEY,HUME_SECRET_KEY)

Below are the steps to completing deployment:

1. Create a Git Repository for your project.
2. Provide the required environment variables. To get your API key and Secret key, log into the Hume AI Platform and visit the [API keys page](https://app.hume.ai/keys).

## Modify the project

1. Clone this examples repository:

   ```shell
   git clone https://github.com/humeai/hume-api-examples
   cd hume-api-examples/evi/evi-next-js-app-router-quickstart
   ```

2. Install dependencies:

   ```shell
   npm install
   ```

3. Set up your API key and Secret key:

   In order to make an authenticated connection we will first need to generate an access token. Doing so will require your API key and Secret key. These keys can be obtained by logging into the Hume AI Platform and visiting the [API keys page](https://app.hume.ai/keys). For detailed instructions, see our documentation on [getting your API keys](https://dev.hume.ai/docs/introduction/api-key).

   Place your `HUME_API_KEY` and `HUME_SECRET_KEY` in a `.env` file at the root of your project.

   ```shell
   echo "HUME_API_KEY=your_api_key_here" > .env
   echo "HUME_SECRET_KEY=your_secret_key_here" >> .env
   ```

   You can copy the `.env.example` file to use as a template.

4. Specify an EVI configuration (Optional):

   EVI is pre-configured with a set of default values, which are automatically applied if you do not specify a configuration. The default configuration includes a preset voice and language model, but does not include a system prompt or tools. To customize these options, you will need to create and specify your own EVI configuration. To learn more, see our [configuration guide](https://dev.hume.ai/docs/empathic-voice-interface-evi/configuration/build-a-configuration).

   Pass in a configuration ID to the `connect` method inside the [components/StartCall.tsx file](https://github.com/HumeAI/hume-api-examples/blob/main/evi/evi-next-js-app-router-quickstart/components/StartCall.tsx).

   ```tsx
   connect({
      auth: { type: "accessToken", value: accessToken },
      configId: "<YOUR_CONFIG_ID>"
   })
   ```

5. Run the project:
   ```shell
   npm run dev
   ```


================================================
FILE: evi/evi-next-js-app-router-quickstart/app/actions/set-llm-key.ts
================================================
"use server";

import { HumeClient } from "hume";

// Module-level Hume client shared across invocations of this server action.
// NOTE(review): the non-null assertion assumes HUME_API_KEY is set at runtime.
const hume = new HumeClient({
  apiKey: process.env.HUME_API_KEY!,
});

/**
 * Sends the supplemental LLM API key to an active EVI chat through the
 * control plane as a session-settings update. Does nothing when the
 * SUPPLEMENTAL_LLM_API_KEY environment variable is absent or empty.
 */
export async function setLlmKeyForChat(chatId: string) {
  const supplementalKey = process.env.SUPPLEMENTAL_LLM_API_KEY;
  if (!supplementalKey) {
    return;
  }

  await hume.empathicVoice.controlPlane.send(chatId, {
    type: "session_settings",
    languageModelApiKey: supplementalKey,
  });
}


================================================
FILE: evi/evi-next-js-app-router-quickstart/app/api-key/page.tsx
================================================
import ChatLoader from "@/components/ChatLoader";

// Always render on request so the environment variable is read at runtime.
export const dynamic = "force-dynamic";
export const revalidate = 0;

/**
 * Server component that reads the Hume API key from the environment and
 * passes it to the client-side chat loader. Throws (rendering the route's
 * error boundary) when the key is missing or blank.
 */
export default async function ApiKeyPage() {
  const apiKey = process.env.HUME_API_KEY;
  if (!apiKey || apiKey.trim() === "") {
    throw new Error("The HUME_API_KEY environment variable is not set.");
  }

  return (
    <div className="grow flex flex-col">
      <ChatLoader apiKey={apiKey} />
    </div>
  );
}


================================================
FILE: evi/evi-next-js-app-router-quickstart/app/error.tsx
================================================
"use client";

/** Fallback UI shown by Next.js when a route segment throws during render. */
export default function Error() {
  return (
    <div className="absolute inset-0 grid place-content-center">
      <div className="text-center">
        <h1 className="text-white">An unexpected error occurred</h1>
        <p className="text-gray-500">Please try again later</p>
      </div>
    </div>
  );
}


================================================
FILE: evi/evi-next-js-app-router-quickstart/app/globals.css
================================================
@import "tailwindcss";

/* Register theme tokens for Tailwind v4 so utilities like border-border, font-sans work */
@theme {
  --color-border: hsl(var(--border));
  --color-input: hsl(var(--input));
  --color-ring: hsl(var(--ring));
  --color-background: hsl(var(--background));
  --color-foreground: hsl(var(--foreground));
  --color-primary: hsl(var(--primary));
  --color-primary-foreground: hsl(var(--primary-f
Download .txt
gitextract_vgmds2a9/

├── .github/
│   ├── dependabot.yml
│   └── workflows/
│       ├── dependabot-auto-merge.yml
│       └── test-examples.yml
├── .gitignore
├── Directory.Packages.props
├── LICENSE
├── README.md
├── evi/
│   ├── evi-dotnet-quickstart/
│   │   ├── .gitignore
│   │   ├── EviTests.cs
│   │   ├── Program.cs
│   │   ├── README.md
│   │   ├── evi-csharp-quickstart.csproj
│   │   ├── evi-csharp-quickstart.tests.csproj
│   │   └── sample_input.pcm
│   ├── evi-flutter/
│   │   ├── .gitignore
│   │   ├── README.md
│   │   ├── analysis_options.yaml
│   │   ├── android/
│   │   │   ├── .gitignore
│   │   │   ├── app/
│   │   │   │   ├── build.gradle
│   │   │   │   └── src/
│   │   │   │       ├── debug/
│   │   │   │       │   └── AndroidManifest.xml
│   │   │   │       ├── main/
│   │   │   │       │   ├── AndroidManifest.xml
│   │   │   │       │   ├── kotlin/
│   │   │   │       │   │   └── com/
│   │   │   │       │   │       └── example/
│   │   │   │       │   │           └── evi_example/
│   │   │   │       │   │               └── MainActivity.kt
│   │   │   │       │   └── res/
│   │   │   │       │       ├── drawable/
│   │   │   │       │       │   └── launch_background.xml
│   │   │   │       │       ├── drawable-v21/
│   │   │   │       │       │   └── launch_background.xml
│   │   │   │       │       ├── values/
│   │   │   │       │       │   └── styles.xml
│   │   │   │       │       └── values-night/
│   │   │   │       │           └── styles.xml
│   │   │   │       └── profile/
│   │   │   │           └── AndroidManifest.xml
│   │   │   ├── build.gradle
│   │   │   ├── gradle/
│   │   │   │   └── wrapper/
│   │   │   │       └── gradle-wrapper.properties
│   │   │   ├── gradle.properties
│   │   │   └── settings.gradle
│   │   ├── audio/
│   │   │   ├── .gitignore
│   │   │   ├── .metadata
│   │   │   ├── ios/
│   │   │   │   ├── .gitignore
│   │   │   │   ├── Assets/
│   │   │   │   │   └── .gitkeep
│   │   │   │   ├── Classes/
│   │   │   │   │   ├── AudioPlugin.swift
│   │   │   │   │   ├── Microphone.swift
│   │   │   │   │   └── SoundPlayer.swift
│   │   │   │   ├── Resources/
│   │   │   │   │   └── PrivacyInfo.xcprivacy
│   │   │   │   └── audio.podspec
│   │   │   ├── lib/
│   │   │   │   ├── audio.dart
│   │   │   │   ├── audio_method_channel.dart
│   │   │   │   ├── audio_platform_interface.dart
│   │   │   │   └── dart_audio.dart
│   │   │   ├── pubspec.yaml
│   │   │   └── test/
│   │   │       ├── audio_method_channel_test.dart
│   │   │       └── audio_test.dart
│   │   ├── ios/
│   │   │   ├── .gitignore
│   │   │   ├── Flutter/
│   │   │   │   ├── AppFrameworkInfo.plist
│   │   │   │   ├── Debug.xcconfig
│   │   │   │   └── Release.xcconfig
│   │   │   ├── Podfile
│   │   │   ├── Runner/
│   │   │   │   ├── AppDelegate.swift
│   │   │   │   ├── Assets.xcassets/
│   │   │   │   │   ├── AppIcon.appiconset/
│   │   │   │   │   │   └── Contents.json
│   │   │   │   │   └── LaunchImage.imageset/
│   │   │   │   │       ├── Contents.json
│   │   │   │   │       └── README.md
│   │   │   │   ├── Base.lproj/
│   │   │   │   │   ├── LaunchScreen.storyboard
│   │   │   │   │   └── Main.storyboard
│   │   │   │   ├── Info.plist
│   │   │   │   └── Runner-Bridging-Header.h
│   │   │   ├── Runner.xcodeproj/
│   │   │   │   ├── project.pbxproj
│   │   │   │   ├── project.xcworkspace/
│   │   │   │   │   ├── contents.xcworkspacedata
│   │   │   │   │   └── xcshareddata/
│   │   │   │   │       ├── IDEWorkspaceChecks.plist
│   │   │   │   │       └── WorkspaceSettings.xcsettings
│   │   │   │   └── xcshareddata/
│   │   │   │       └── xcschemes/
│   │   │   │           └── Runner.xcscheme
│   │   │   ├── Runner.xcworkspace/
│   │   │   │   ├── contents.xcworkspacedata
│   │   │   │   └── xcshareddata/
│   │   │   │       ├── IDEWorkspaceChecks.plist
│   │   │   │       └── WorkspaceSettings.xcsettings
│   │   │   ├── RunnerTests/
│   │   │   │   └── RunnerTests.swift
│   │   │   └── build/
│   │   │       └── ios/
│   │   │           └── XCBuildData/
│   │   │               └── PIFCache/
│   │   │                   └── workspace/
│   │   │                       └── WORKSPACE@v11_hash=(null)_subobjects=4483c0dac1d2a63621e8a5d74e580a19-json
│   │   ├── lib/
│   │   │   ├── chat_card.dart
│   │   │   ├── evi_message.dart
│   │   │   ├── main.dart
│   │   │   └── theme.dart
│   │   ├── pubspec.yaml
│   │   ├── test/
│   │   │   └── widget_test.dart
│   │   └── web/
│   │       ├── index.html
│   │       └── manifest.json
│   ├── evi-next-js-app-router-quickstart/
│   │   ├── .eslintrc.json
│   │   ├── .gitignore
│   │   ├── .prettierrc.json
│   │   ├── README.md
│   │   ├── app/
│   │   │   ├── actions/
│   │   │   │   └── set-llm-key.ts
│   │   │   ├── api-key/
│   │   │   │   └── page.tsx
│   │   │   ├── error.tsx
│   │   │   ├── globals.css
│   │   │   ├── layout.tsx
│   │   │   ├── page.tsx
│   │   │   └── session-settings/
│   │   │       └── page.tsx
│   │   ├── components/
│   │   │   ├── Chat.tsx
│   │   │   ├── ChatLoader.tsx
│   │   │   ├── Controls.tsx
│   │   │   ├── Expressions.tsx
│   │   │   ├── Messages.tsx
│   │   │   ├── MicFFT.tsx
│   │   │   ├── Nav.tsx
│   │   │   ├── StartCall.tsx
│   │   │   ├── logos/
│   │   │   │   ├── GitHub.tsx
│   │   │   │   └── Hume.tsx
│   │   │   └── ui/
│   │   │       ├── button.tsx
│   │   │       └── toggle.tsx
│   │   ├── components.json
│   │   ├── package.json
│   │   ├── playwright.config.ts
│   │   ├── postcss.config.mjs
│   │   ├── tailwind.config.ts
│   │   ├── test-results/
│   │   │   └── .last-run.json
│   │   ├── tests/
│   │   │   └── voice-react.spec.ts
│   │   ├── tsconfig.json
│   │   └── utils/
│   │       ├── e2e-hooks.ts
│   │       ├── expressionColors.ts
│   │       ├── index.ts
│   │       └── session-settings.ts
│   ├── evi-next-js-function-calling/
│   │   ├── .eslintrc.json
│   │   ├── .gitignore
│   │   ├── .prettierrc.json
│   │   ├── README.md
│   │   ├── app/
│   │   │   ├── api/
│   │   │   │   └── fetchWeather/
│   │   │   │       └── route.ts
│   │   │   ├── error.tsx
│   │   │   ├── globals.css
│   │   │   ├── layout.tsx
│   │   │   └── page.tsx
│   │   ├── components/
│   │   │   ├── Chat.tsx
│   │   │   ├── ChatLoader.tsx
│   │   │   ├── Controls.tsx
│   │   │   ├── Expressions.tsx
│   │   │   ├── Messages.tsx
│   │   │   ├── MicFFT.tsx
│   │   │   ├── Nav.tsx
│   │   │   ├── StartCall.tsx
│   │   │   ├── logos/
│   │   │   │   ├── GitHub.tsx
│   │   │   │   └── Hume.tsx
│   │   │   └── ui/
│   │   │       ├── button.tsx
│   │   │       └── toggle.tsx
│   │   ├── components.json
│   │   ├── package.json
│   │   ├── postcss.config.mjs
│   │   ├── tailwind.config.ts
│   │   ├── tsconfig.json
│   │   └── utils/
│   │       ├── expressionColors.ts
│   │       ├── fetchWeather.ts
│   │       └── index.ts
│   ├── evi-next-js-pages-router-quickstart/
│   │   ├── .eslintrc.json
│   │   ├── .gitignore
│   │   ├── .prettierrc.json
│   │   ├── README.md
│   │   ├── components/
│   │   │   ├── Chat.tsx
│   │   │   ├── Controls.tsx
│   │   │   ├── Expressions.tsx
│   │   │   ├── Messages.tsx
│   │   │   ├── MicFFT.tsx
│   │   │   ├── Nav.tsx
│   │   │   ├── StartCall.tsx
│   │   │   ├── logos/
│   │   │   │   ├── GitHub.tsx
│   │   │   │   └── Hume.tsx
│   │   │   └── ui/
│   │   │       ├── button.tsx
│   │   │       └── toggle.tsx
│   │   ├── components.json
│   │   ├── next.config.js
│   │   ├── package.json
│   │   ├── pages/
│   │   │   ├── 500.tsx
│   │   │   ├── _app.tsx
│   │   │   ├── _document.tsx
│   │   │   ├── api/
│   │   │   │   └── control-plane/
│   │   │   │       └── set-llm-key.ts
│   │   │   └── index.tsx
│   │   ├── postcss.config.mjs
│   │   ├── styles/
│   │   │   └── globals.css
│   │   ├── tailwind.config.ts
│   │   ├── tsconfig.json
│   │   └── utils/
│   │       ├── expressionColors.ts
│   │       └── index.ts
│   ├── evi-prompting-examples/
│   │   ├── README.md
│   │   ├── deeper_questions_prompt.txt
│   │   ├── default_prompt.txt
│   │   └── evi-3-default-prompt.txt
│   ├── evi-python-chat-history/
│   │   ├── .gitignore
│   │   ├── README.md
│   │   ├── main.py
│   │   ├── pyproject.toml
│   │   └── transcript_4d720063-d4ab-4407-ad22-e41079373d79.txt
│   ├── evi-python-clm-sse/
│   │   ├── README.md
│   │   ├── openai_sse.py
│   │   └── pyproject.toml
│   ├── evi-python-clm-wss/
│   │   ├── LICENSE
│   │   ├── README.md
│   │   ├── docs/
│   │   │   └── detailed-install-instructions-mac.md
│   │   ├── main.py
│   │   └── pyproject.toml
│   ├── evi-python-control-plane/
│   │   ├── LICENSE
│   │   ├── README.md
│   │   ├── main.py
│   │   └── pyproject.toml
│   ├── evi-python-function-calling/
│   │   ├── .gitignore
│   │   ├── LICENSE
│   │   ├── README.md
│   │   ├── main.py
│   │   └── utils.py
│   ├── evi-python-phone-calling-proxy-server/
│   │   ├── .gitignore
│   │   ├── README.md
│   │   ├── app.py
│   │   ├── audio_processors/
│   │   │   ├── __init__.py
│   │   │   ├── evi_audio_processor.py
│   │   │   └── twilio_audio_processor.py
│   │   ├── pyproject.toml
│   │   └── tools.py
│   ├── evi-python-quickstart/
│   │   ├── .gitignore
│   │   ├── LICENSE
│   │   ├── README.md
│   │   ├── conftest.py
│   │   ├── pyproject.toml
│   │   ├── quickstart.py
│   │   └── test_quickstart.py
│   ├── evi-python-raw-api/
│   │   ├── .gitignore
│   │   ├── LICENSE
│   │   ├── README.md
│   │   ├── requirements_linux.txt
│   │   ├── requirements_mac.txt
│   │   └── src/
│   │       ├── authenticator.py
│   │       ├── connection.py
│   │       ├── devices.py
│   │       └── main.py
│   ├── evi-python-webhooks/
│   │   ├── .gitignore
│   │   ├── README.md
│   │   ├── app.py
│   │   ├── pyproject.toml
│   │   └── utils.py
│   ├── evi-python-wss-clm-endpoint/
│   │   ├── .dockerignore
│   │   ├── Dockerfile
│   │   ├── README.md
│   │   ├── agent.py
│   │   ├── app.py
│   │   ├── cdk/
│   │   │   ├── README.md
│   │   │   ├── app.py
│   │   │   ├── cdk/
│   │   │   │   ├── __init__.py
│   │   │   │   └── eliza_stack.py
│   │   │   ├── cdk.json
│   │   │   ├── cdk.out/
│   │   │   │   ├── ElizaStack.assets.json
│   │   │   │   ├── ElizaStack.template.json
│   │   │   │   ├── asset.689e46f5ffafa1e0f81f114b5dfd7694d2d1e291d9bd855e4f7b601d2b2403d0/
│   │   │   │   │   ├── .dockerignore
│   │   │   │   │   ├── Dockerfile
│   │   │   │   │   ├── README.md
│   │   │   │   │   ├── agent.py
│   │   │   │   │   ├── app.py
│   │   │   │   │   ├── modal/
│   │   │   │   │   │   ├── README.md
│   │   │   │   │   │   └── modal_app.py
│   │   │   │   │   └── pyproject.toml
│   │   │   │   ├── asset.ee7de53d64cc9d6248fa6aa550f92358f6c907b5efd6f3298aeab1b5e7ea358a/
│   │   │   │   │   ├── __entrypoint__.js
│   │   │   │   │   └── index.js
│   │   │   │   ├── asset.f372550efb96be7f549f1d0346e8646080c1fe2b15c7c2e3b1dceb07b6656c54/
│   │   │   │   │   ├── .dockerignore
│   │   │   │   │   ├── Dockerfile
│   │   │   │   │   ├── README.md
│   │   │   │   │   ├── agent.py
│   │   │   │   │   ├── app.py
│   │   │   │   │   ├── modal_app.py
│   │   │   │   │   └── pyproject.toml
│   │   │   │   ├── cdk.out
│   │   │   │   ├── manifest.json
│   │   │   │   └── tree.json
│   │   │   └── requirements.txt
│   │   ├── modal/
│   │   │   ├── README.md
│   │   │   └── modal_app.py
│   │   ├── modal_app.py
│   │   └── pyproject.toml
│   ├── evi-react-native/
│   │   ├── .gitignore
│   │   ├── App.tsx
│   │   ├── README.md
│   │   ├── VoiceIsolationModePrompt.tsx
│   │   ├── app.json
│   │   ├── eslint.config.js
│   │   ├── index.ts
│   │   ├── metro.config.js
│   │   ├── modules/
│   │   │   └── audio/
│   │   │       ├── expo-module.config.json
│   │   │       ├── index.ts
│   │   │       └── src/
│   │   │           ├── AudioModule.ts
│   │   │           ├── AudioModule.types.ts
│   │   │           └── AudioModule.web.ts
│   │   ├── package.json
│   │   ├── polyfills.ts
│   │   └── tsconfig.json
│   ├── evi-swift-chat/
│   │   ├── .gitignore
│   │   ├── HumeDemo/
│   │   │   ├── Assets.xcassets/
│   │   │   │   ├── AccentColor.colorset/
│   │   │   │   │   └── Contents.json
│   │   │   │   ├── AppIcon.appiconset/
│   │   │   │   │   └── Contents.json
│   │   │   │   ├── Contents.json
│   │   │   │   └── Logo.imageset/
│   │   │   │       └── Contents.json
│   │   │   ├── EVIDemo/
│   │   │   │   ├── Clients/
│   │   │   │   │   └── AccessTokenClient.swift
│   │   │   │   ├── Extensions/
│   │   │   │   │   └── Dictionary+Additions.swift
│   │   │   │   ├── Mocks.swift
│   │   │   │   ├── Rows/
│   │   │   │   │   ├── DetailedRow.swift
│   │   │   │   │   └── MessageRow.swift
│   │   │   │   └── Views/
│   │   │   │       ├── Components/
│   │   │   │       │   ├── EventRowView.swift
│   │   │   │       │   └── RowView.swift
│   │   │   │       ├── EVIChatView.swift
│   │   │   │       ├── Models/
│   │   │   │       │   └── EVIChatModel.swift
│   │   │   │       └── Modifiers/
│   │   │   │           └── FlippedUpsideDown.swift
│   │   │   ├── HumeDemoApp.swift
│   │   │   ├── Info.plist
│   │   │   └── Preview Content/
│   │   │       ├── EVIChatModel+Previews.swift
│   │   │       └── Preview Assets.xcassets/
│   │   │           └── Contents.json
│   │   ├── HumeDemo.xcodeproj/
│   │   │   ├── project.pbxproj
│   │   │   └── xcshareddata/
│   │   │       └── xcschemes/
│   │   │           └── HumeDemo.xcscheme
│   │   ├── README.md
│   │   └── access_token_service/
│   │       ├── README.md
│   │       ├── requirements.txt
│   │       └── run_token_service.py
│   ├── evi-touchdesigner/
│   │   ├── .gitignore
│   │   ├── HumeTD.tox
│   │   ├── HumeTDDemo.toe
│   │   ├── README.md
│   │   └── Scripts/
│   │       ├── HumeTD.py
│   │       └── MessagePlaback.py
│   ├── evi-typescript-chat-history/
│   │   ├── .gitignore
│   │   ├── README.md
│   │   ├── package.json
│   │   ├── src/
│   │   │   └── index.ts
│   │   └── tsconfig.json
│   ├── evi-typescript-function-calling/
│   │   ├── .gitignore
│   │   ├── README.md
│   │   ├── index.html
│   │   ├── package.json
│   │   ├── src/
│   │   │   ├── handleToolCall.ts
│   │   │   ├── main.ts
│   │   │   ├── styles.css
│   │   │   └── vite-env.d.ts
│   │   └── tsconfig.json
│   ├── evi-typescript-proxy/
│   │   ├── .gitignore
│   │   ├── README.md
│   │   ├── app/
│   │   │   ├── api.ts
│   │   │   ├── cli.ts
│   │   │   ├── downstream.ts
│   │   │   ├── main.ts
│   │   │   ├── package.json
│   │   │   ├── tsconfig.json
│   │   │   ├── upstream.ts
│   │   │   └── util.ts
│   │   ├── shared/
│   │   │   └── types.mts
│   │   └── web/
│   │       ├── .gitignore
│   │       ├── ChatControls.tsx
│   │       ├── ChatMessages.tsx
│   │       ├── EVIChat.tsx
│   │       ├── StartCall.tsx
│   │       ├── WebSocketControls.tsx
│   │       ├── app.tsx
│   │       ├── index.html
│   │       ├── package.json
│   │       ├── styles.css
│   │       ├── tsconfig.json
│   │       └── useProxyState.ts
│   ├── evi-typescript-quickstart/
│   │   ├── .gitignore
│   │   ├── .prettierrc.json
│   │   ├── README.md
│   │   ├── index.html
│   │   ├── package.json
│   │   ├── src/
│   │   │   ├── lib/
│   │   │   │   ├── audio.ts
│   │   │   │   ├── evi.test.ts
│   │   │   │   ├── evi.ts
│   │   │   │   ├── index.ts
│   │   │   │   └── ui.ts
│   │   │   ├── main.ts
│   │   │   ├── styles/
│   │   │   │   └── globals.css
│   │   │   └── vite-env.d.ts
│   │   ├── tsconfig.json
│   │   └── vitest.config.ts
│   ├── evi-typescript-webhooks/
│   │   ├── .gitignore
│   │   ├── README.md
│   │   ├── package.json
│   │   ├── src/
│   │   │   ├── main.ts
│   │   │   └── util.ts
│   │   └── tsconfig.json
│   ├── evi-unity-quickstart/
│   │   ├── .gitignore
│   │   ├── Assets/
│   │   │   ├── DefaultScene.unity
│   │   │   ├── DefaultScene.unity.meta
│   │   │   ├── Editor/
│   │   │   │   └── AutoLoadDefaultScene.cs
│   │   │   ├── Plugins/
│   │   │   │   ├── Microsoft.Extensions.DependencyInjection.Abstractions.dll.meta
│   │   │   │   ├── Microsoft.Extensions.Logging.Abstractions.dll.meta
│   │   │   │   └── Microsoft.IO.RecyclableMemoryStream.dll.meta
│   │   │   ├── Plugins.meta
│   │   │   ├── Scripts/
│   │   │   │   ├── HumeEVI.cs
│   │   │   │   ├── HumeEVI.cs.meta
│   │   │   │   ├── SceneBuilder.cs
│   │   │   │   └── SceneBuilder.cs.meta
│   │   │   └── Scripts.meta
│   │   ├── Packages/
│   │   │   └── manifest.json
│   │   ├── ProjectSettings/
│   │   │   ├── AudioManager.asset
│   │   │   ├── ClusterInputManager.asset
│   │   │   ├── DynamicsManager.asset
│   │   │   ├── EditorBuildSettings.asset
│   │   │   ├── EditorSettings.asset
│   │   │   ├── GraphicsSettings.asset
│   │   │   ├── InputManager.asset
│   │   │   ├── MemorySettings.asset
│   │   │   ├── MultiplayerManager.asset
│   │   │   ├── NavMeshAreas.asset
│   │   │   ├── PackageManagerSettings.asset
│   │   │   ├── Physics2DSettings.asset
│   │   │   ├── PresetManager.asset
│   │   │   ├── ProjectSettings.asset
│   │   │   ├── ProjectVersion.txt
│   │   │   ├── QualitySettings.asset
│   │   │   ├── SceneTemplateSettings.json
│   │   │   ├── TagManager.asset
│   │   │   ├── TimeManager.asset
│   │   │   ├── UnityConnectSettings.asset
│   │   │   ├── VFXManager.asset
│   │   │   └── VersionControlSettings.asset
│   │   └── README.md
│   └── evi-vue-widget/
│       ├── .gitignore
│       ├── .nvmrc
│       ├── README.md
│       ├── index.html
│       ├── package.json
│       ├── src/
│       │   ├── App.vue
│       │   ├── components/
│       │   │   └── HumeEmbed.vue
│       │   ├── main.ts
│       │   ├── style.css
│       │   └── vite-env.d.ts
│       ├── tsconfig.json
│       ├── tsconfig.node.json
│       └── vite.config.ts
├── expression-measurement/
│   ├── batch/
│   │   ├── next-js-emotional-language/
│   │   │   ├── .env.example
│   │   │   ├── .eslintrc.json
│   │   │   ├── .gitignore
│   │   │   ├── .prettierrc
│   │   │   ├── README.md
│   │   │   ├── next.config.js
│   │   │   ├── package.json
│   │   │   ├── postcss.config.js
│   │   │   ├── src/
│   │   │   │   ├── components/
│   │   │   │   │   ├── Introduction.tsx
│   │   │   │   │   ├── TextRender.tsx
│   │   │   │   │   └── Tooltip.tsx
│   │   │   │   ├── lib/
│   │   │   │   │   ├── client.ts
│   │   │   │   │   ├── env.ts
│   │   │   │   │   ├── mutations/
│   │   │   │   │   │   └── processTextFile.ts
│   │   │   │   │   ├── schemas/
│   │   │   │   │   │   └── index.ts
│   │   │   │   │   └── utils.ts
│   │   │   │   ├── pages/
│   │   │   │   │   ├── _app.tsx
│   │   │   │   │   ├── _document.tsx
│   │   │   │   │   ├── api/
│   │   │   │   │   │   ├── results.ts
│   │   │   │   │   │   └── send.ts
│   │   │   │   │   └── index.tsx
│   │   │   │   └── styles/
│   │   │   │       └── globals.css
│   │   │   ├── tailwind.config.js
│   │   │   └── tsconfig.json
│   │   ├── python-top-emotions/
│   │   │   ├── README.md
│   │   │   └── top_emotions.py
│   │   └── typescript-raw-text-processor/
│   │       ├── .gitignore
│   │       ├── README.md
│   │       ├── package.json
│   │       ├── src/
│   │       │   ├── index.test.ts
│   │       │   └── index.ts
│   │       └── tsconfig.json
│   ├── streaming/
│   │   ├── next-js-streaming-example/
│   │   │   ├── .gitignore
│   │   │   ├── README.md
│   │   │   ├── components/
│   │   │   │   ├── inputs/
│   │   │   │   │   ├── Button.tsx
│   │   │   │   │   ├── TextArea.tsx
│   │   │   │   │   └── TextBox.tsx
│   │   │   │   ├── menu/
│   │   │   │   │   ├── Auth.tsx
│   │   │   │   │   ├── Login.tsx
│   │   │   │   │   ├── Nav.tsx
│   │   │   │   │   └── Toolbar.tsx
│   │   │   │   └── widgets/
│   │   │   │       ├── AudioWidgets.tsx
│   │   │   │       ├── BurstWidgets.tsx
│   │   │   │       ├── Descriptor.tsx
│   │   │   │       ├── DiscreteTimeline.tsx
│   │   │   │       ├── FaceTrackedVideo.tsx
│   │   │   │       ├── FaceWidgets.tsx
│   │   │   │       ├── LanguageWidgets.tsx
│   │   │   │       ├── Loader.tsx
│   │   │   │       ├── LoaderSet.tsx
│   │   │   │       ├── ProsodyWidgets.tsx
│   │   │   │       └── TopEmotions.tsx
│   │   │   ├── lib/
│   │   │   │   ├── data/
│   │   │   │   │   ├── audioPrediction.ts
│   │   │   │   │   ├── boundingBox.ts
│   │   │   │   │   ├── characterRange.ts
│   │   │   │   │   ├── embedding.ts
│   │   │   │   │   ├── emotion.ts
│   │   │   │   │   ├── facePrediction.ts
│   │   │   │   │   ├── languagePrediction.ts
│   │   │   │   │   ├── range.ts
│   │   │   │   │   ├── timeRange.ts
│   │   │   │   │   └── trackedFace.ts
│   │   │   │   ├── hooks/
│   │   │   │   │   ├── keyPress.ts
│   │   │   │   │   ├── stability.ts
│   │   │   │   │   └── storage.ts
│   │   │   │   ├── media/
│   │   │   │   │   ├── audioRecorder.ts
│   │   │   │   │   └── videoRecorder.ts
│   │   │   │   └── utilities/
│   │   │   │       ├── asyncUtilities.ts
│   │   │   │       ├── blobUtilities.ts
│   │   │   │       ├── embeddingUtilities.ts
│   │   │   │       ├── emotionUtilities.ts
│   │   │   │       ├── environmentUtilities.ts
│   │   │   │       ├── scalingUtilities.ts
│   │   │   │       ├── styleUtilities.ts
│   │   │   │       └── typeUtilities.ts
│   │   │   ├── next.config.js
│   │   │   ├── package.json
│   │   │   ├── pages/
│   │   │   │   ├── _app.tsx
│   │   │   │   ├── burst/
│   │   │   │   │   ├── index.tsx
│   │   │   │   │   └── timeline/
│   │   │   │   │       └── index.tsx
│   │   │   │   ├── face/
│   │   │   │   │   ├── calibrate/
│   │   │   │   │   │   └── index.tsx
│   │   │   │   │   └── index.tsx
│   │   │   │   ├── index.tsx
│   │   │   │   ├── language/
│   │   │   │   │   └── index.tsx
│   │   │   │   └── prosody/
│   │   │   │       └── index.tsx
│   │   │   ├── postcss.config.js
│   │   │   ├── styles/
│   │   │   │   └── globals.css
│   │   │   ├── tailwind.config.js
│   │   │   └── tsconfig.json
│   │   └── python-streaming-example/
│   │       ├── .gitignore
│   │       ├── README.md
│   │       ├── main.py
│   │       ├── pyproject.toml
│   │       └── test_main.py
│   └── visualization-example/
│       ├── example-notebook.ipynb
│       └── predictions.json
├── monorepo.code-workspace
└── tts/
    ├── tts-dotnet-quickstart/
    │   ├── .gitignore
    │   ├── Program.cs
    │   ├── README.md
    │   ├── StreamingTtsService.cs
    │   ├── TtsTests.cs
    │   ├── tts-csharp-quickstart.csproj
    │   └── tts-csharp-quickstart.tests.csproj
    ├── tts-next-js-agora/
    │   ├── .gitignore
    │   ├── README.md
    │   ├── app/
    │   │   ├── api/
    │   │   │   ├── generate-agora-token/
    │   │   │   │   └── route.ts
    │   │   │   ├── invite-agent/
    │   │   │   │   └── route.ts
    │   │   │   └── stop-conversation/
    │   │   │       └── route.ts
    │   │   ├── globals.css
    │   │   ├── layout.tsx
    │   │   └── page.tsx
    │   ├── components/
    │   │   ├── AudioVisualizer.tsx
    │   │   ├── ConversationComponent.tsx
    │   │   ├── ConvoTextStream.tsx
    │   │   └── MicrophoneButton.tsx
    │   ├── env.example
    │   ├── eslint.config.mjs
    │   ├── lib/
    │   │   └── message.ts
    │   ├── next.config.ts
    │   ├── package.json
    │   ├── tsconfig.json
    │   └── types/
    │       ├── agora-rtc-react.d.ts
    │       ├── agora-token.d.ts
    │       └── conversation.ts
    ├── tts-next-js-chat/
    │   ├── .gitignore
    │   ├── README.md
    │   ├── eslint.config.mjs
    │   ├── next.config.ts
    │   ├── package.json
    │   ├── postcss.config.mjs
    │   ├── src/
    │   │   └── app/
    │   │       ├── api/
    │   │       │   ├── chat/
    │   │       │   │   └── route.ts
    │   │       │   ├── transcribe/
    │   │       │   │   └── route.ts
    │   │       │   ├── tts/
    │   │       │   │   └── route.ts
    │   │       │   └── voices/
    │   │       │       └── route.ts
    │   │       ├── components/
    │   │       │   ├── AudioPlayer.tsx
    │   │       │   ├── Chat.tsx
    │   │       │   ├── ControlsPanel.tsx
    │   │       │   ├── VoiceSelector.tsx
    │   │       │   └── logos/
    │   │       │       └── Hume.tsx
    │   │       ├── context/
    │   │       │   └── VoiceSettingsContext.tsx
    │   │       ├── globals.css
    │   │       ├── hooks/
    │   │       │   ├── useRecording.ts
    │   │       │   ├── useTts.ts
    │   │       │   └── useVoices.ts
    │   │       ├── layout.tsx
    │   │       ├── lib/
    │   │       │   └── humeClient.ts
    │   │       └── page.tsx
    │   └── tsconfig.json
    ├── tts-next-js-vercel-ai-sdk/
    │   ├── .gitignore
    │   ├── README.md
    │   ├── next.config.ts
    │   ├── package.json
    │   ├── postcss.config.mjs
    │   ├── src/
    │   │   ├── actions/
    │   │   │   ├── generate-speech.ts
    │   │   │   └── list-voices.ts
    │   │   ├── app/
    │   │   │   ├── globals.css
    │   │   │   ├── layout.tsx
    │   │   │   └── page.tsx
    │   │   ├── components/
    │   │   │   ├── AudioClipCard.tsx
    │   │   │   ├── AudioGallery.tsx
    │   │   │   ├── TextAreaField.tsx
    │   │   │   ├── TtsForm.tsx
    │   │   │   ├── VoiceSelect.tsx
    │   │   │   └── logos/
    │   │   │       └── Hume.tsx
    │   │   ├── hooks/
    │   │   │   └── useVoices.ts
    │   │   └── types/
    │   │       └── clip.ts
    │   └── tsconfig.json
    ├── tts-python-livekit/
    │   ├── .gitignore
    │   ├── .python-version
    │   ├── README.md
    │   ├── pyproject.toml
    │   └── src/
    │       ├── __init__.py
    │       ├── agent_session/
    │       │   ├── __init__.py
    │       │   ├── constants.py
    │       │   └── main.py
    │       ├── standalone_tts/
    │       │   ├── __init__.py
    │       │   └── main.py
    │       └── utils.py
    ├── tts-python-quickstart/
    │   ├── .gitignore
    │   ├── README.md
    │   ├── app.py
    │   ├── conftest.py
    │   ├── pyproject.toml
    │   └── test_app.py
    ├── tts-swift-quickstart/
    │   ├── .gitignore
    │   ├── HumeDemo/
    │   │   ├── Assets.xcassets/
    │   │   │   ├── AccentColor.colorset/
    │   │   │   │   └── Contents.json
    │   │   │   ├── AppIcon.appiconset/
    │   │   │   │   └── Contents.json
    │   │   │   ├── Contents.json
    │   │   │   └── Logo.imageset/
    │   │   │       └── Contents.json
    │   │   ├── HumeDemoApp.swift
    │   │   ├── Info.plist
    │   │   ├── Preview Content/
    │   │   │   └── Preview Assets.xcassets/
    │   │   │       └── Contents.json
    │   │   └── TTSDemo/
    │   │       ├── Clients/
    │   │       │   └── AccessTokenClient.swift
    │   │       ├── Extensions/
    │   │       │   └── Dictionary+Additions.swift
    │   │       ├── Mocks.swift
    │   │       └── Views/
    │   │           ├── Components/
    │   │           │   ├── RowView.swift
    │   │           │   └── TTSEventView.swift
    │   │           ├── Models/
    │   │           │   ├── TTSEvent.swift
    │   │           │   ├── TTSModel+Types.swift
    │   │           │   └── TTSModel.swift
    │   │           ├── Modifiers/
    │   │           │   └── FlippedUpsideDown.swift
    │   │           └── TTSView.swift
    │   ├── HumeDemo.xcodeproj/
    │   │   ├── project.pbxproj
    │   │   └── xcshareddata/
    │   │       └── xcschemes/
    │   │           └── HumeDemo.xcscheme
    │   ├── README.md
    │   └── access_token_service/
    │       ├── README.md
    │       ├── requirements.txt
    │       └── run_token_service.py
    ├── tts-typescript-lipsync/
    │   ├── .gitignore
    │   ├── README.md
    │   ├── index.html
    │   ├── index.tsx
    │   ├── mouth.ts
    │   ├── package.json
    │   ├── tsconfig.json
    │   └── vite-env.d.ts
    ├── tts-typescript-quickstart/
    │   ├── .gitignore
    │   ├── README.md
    │   ├── audio_player.ts
    │   ├── index.test.ts
    │   ├── index.ts
    │   ├── package.json
    │   ├── pnpm-workspace.yaml
    │   ├── tsconfig.json
    │   ├── vite-env.d.ts
    │   ├── vite.config.ts
    │   └── vitest.config.ts
    └── tts-unity-quickstart/
        ├── .gitignore
        ├── Assets/
        │   ├── DefaultScene.unity
        │   ├── DefaultScene.unity.meta
        │   ├── Scripts/
        │   │   ├── HumeSpeaker.cs
        │   │   ├── HumeSpeaker.cs.meta
        │   │   ├── SceneBuilder.cs
        │   │   └── SceneBuilder.cs.meta
        │   └── Scripts.meta
        ├── Packages/
        │   └── manifest.json
        ├── ProjectSettings/
        │   ├── AudioManager.asset
        │   ├── ClusterInputManager.asset
        │   ├── DynamicsManager.asset
        │   ├── EditorBuildSettings.asset
        │   ├── EditorSettings.asset
        │   ├── GraphicsSettings.asset
        │   ├── InputManager.asset
        │   ├── MemorySettings.asset
        │   ├── MultiplayerManager.asset
        │   ├── NavMeshAreas.asset
        │   ├── PackageManagerSettings.asset
        │   ├── Physics2DSettings.asset
        │   ├── PresetManager.asset
        │   ├── ProjectSettings.asset
        │   ├── ProjectVersion.txt
        │   ├── QualitySettings.asset
        │   ├── SceneTemplateSettings.json
        │   ├── TagManager.asset
        │   ├── TimeManager.asset
        │   ├── UnityConnectSettings.asset
        │   ├── VFXManager.asset
        │   └── VersionControlSettings.asset
        └── README.md
Download .txt
SYMBOL INDEX (787 symbols across 233 files)

FILE: evi/evi-dotnet-quickstart/EviTests.cs
  class EviTestFixture (line 18) | public class EviTestFixture : IAsyncLifetime
    method InitializeAsync (line 23) | public Task InitializeAsync()
    method DisposeAsync (line 43) | public Task DisposeAsync()
  class EviConnectionTests (line 49) | [Collection("EviTests")]
    method EviConnectionTests (line 55) | public EviConnectionTests(EviTestFixture fixture, ITestOutputHelper ou...
    method TestFixture_HasApiKey (line 61) | [Fact(DisplayName = "test fixture has API key")]
    method Connects_StartsChat_ReceivesChatId_StaysAlive (line 68) | [Fact(DisplayName = "connects w/ API key, starts a chat, receives a ch...
    method Connects_VerifiesSessionSettingsOnConnect (line 103) | [Fact(DisplayName = "connects w/ API key, verifies sessionSettings are...
    method Connects_VerifiesSessionSettingsUpdatedAfterConnect (line 189) | [Fact(DisplayName = "connects w/ API key, verifies sessionSettings can...
  class EviTestCollection (line 260) | [CollectionDefinition("EviTests")]

FILE: evi/evi-flutter/audio/lib/audio.dart
  class Audio (line 8) | class Audio {
    method startRecording (line 43) | Future<void> startRecording()
    method stopRecording (line 62) | Future<void> stopRecording()
    method mute (line 70) | Future<void> mute()
    method unmute (line 78) | Future<void> unmute()
    method enqueueAudio (line 86) | Future<void> enqueueAudio(String base64String)
    method stopPlayback (line 95) | Future<void> stopPlayback()
    method dispose (line 103) | Future<void> dispose()

FILE: evi/evi-flutter/audio/lib/audio_method_channel.dart
  class MethodChannelAudio (line 7) | class MethodChannelAudio extends AudioPlatform {
    method getPlatformVersion (line 13) | Future<String?> getPlatformVersion()

FILE: evi/evi-flutter/audio/lib/audio_platform_interface.dart
  class AudioPlatform (line 5) | abstract class AudioPlatform extends PlatformInterface {
    method getPlatformVersion (line 26) | Future<String?> getPlatformVersion()

FILE: evi/evi-flutter/audio/lib/dart_audio.dart
  class DartAudio (line 7) | class DartAudio {
    method enqueueAudioSegment (line 50) | void enqueueAudioSegment(String base64Bytes)
    method stopPlayback (line 59) | void stopPlayback()
    method _playNextAudioSegment (line 64) | void _playNextAudioSegment()
    method startRecording (line 77) | Future<Stream<List<int>>> startRecording()
    method stopRecording (line 129) | Future<void> stopRecording()
    method mute (line 139) | Future<void> mute()
    method unmute (line 143) | Future<void> unmute()
    method dispose (line 154) | Future<void> dispose()

FILE: evi/evi-flutter/audio/test/audio_method_channel_test.dart
  function main (line 5) | void main()

FILE: evi/evi-flutter/audio/test/audio_test.dart
  class MockAudioPlatform (line 7) | class MockAudioPlatform
    method getPlatformVersion (line 12) | Future<String?> getPlatformVersion()
  function main (line 15) | void main()

FILE: evi/evi-flutter/lib/chat_card.dart
  type Role (line 6) | enum Role { user, assistant }
  class Score (line 8) | class Score {
    method toJson (line 14) | Map<String, dynamic> toJson()
  class ChatEntry (line 22) | class ChatEntry {
  class ChatCard (line 35) | class ChatCard extends StatelessWidget {
    method build (line 40) | Widget build(BuildContext context)
  class ChatDisplay (line 86) | class ChatDisplay extends StatelessWidget {
    method build (line 91) | Widget build(BuildContext context)

FILE: evi/evi-flutter/lib/evi_message.dart
  class EviMessage (line 11) | sealed class EviMessage {
  class ErrorMessage (line 39) | class ErrorMessage extends EviMessage {
  class ChatMetadataMessage (line 46) | class ChatMetadataMessage extends EviMessage {
  class AudioOutputMessage (line 50) | class AudioOutputMessage extends EviMessage {
  class UserInterruptionMessage (line 57) | class UserInterruptionMessage extends EviMessage {
  class ChatMessage (line 61) | class ChatMessage {
  class ProsodyInference (line 69) | class ProsodyInference {
  class Inference (line 74) | class Inference {
  class AssistantMessage (line 79) | class AssistantMessage extends EviMessage {
  class UserMessage (line 88) | class UserMessage extends EviMessage {
  class UnknownMessage (line 97) | class UnknownMessage extends EviMessage {

FILE: evi/evi-flutter/lib/main.dart
  class ConfigManager (line 13) | class ConfigManager {
    method fetchHumeApiKey (line 25) | String fetchHumeApiKey()
    method fetchAccessToken (line 29) | Future<String> fetchAccessToken()
    method loadConfig (line 44) | Future<void> loadConfig()
  function main (line 58) | void main()
  class MyApp (line 68) | class MyApp extends StatelessWidget {
    method build (line 72) | Widget build(BuildContext context)
    method extractTopThreeEmotions (line 90) | List<Score> extractTopThreeEmotions(evi.Inference models)
  class ErrorMessage (line 109) | class ErrorMessage extends StatelessWidget {
    method build (line 115) | Widget build(BuildContext context)
  class MyHomePage (line 125) | class MyHomePage extends StatefulWidget {
    method createState (line 131) | State<MyHomePage> createState()
  class _MyHomePageState (line 134) | class _MyHomePageState extends State<MyHomePage> {
    method appendNewChatMessage (line 146) | void appendNewChatMessage(evi.ChatMessage chatMessage, evi.Inference m...
    method build (line 159) | Widget build(BuildContext context)
    method dispose (line 205) | void dispose()
    method _connect (line 212) | void _connect()
    method _disconnect (line 285) | void _disconnect()
    method _handleConnectionClosed (line 293) | void _handleConnectionClosed()
    method _handleInterruption (line 300) | void _handleInterruption()
    method _muteInput (line 304) | void _muteInput()
    method _prepareAudioSettings (line 311) | void _prepareAudioSettings()
    method _sendAudio (line 324) | void _sendAudio(String base64)
    method _startRecording (line 331) | void _startRecording()
    method _stopRecording (line 342) | void _stopRecording()
    method _unmuteInput (line 346) | void _unmuteInput()

FILE: evi/evi-flutter/test/widget_test.dart
  function main (line 13) | void main()

FILE: evi/evi-next-js-app-router-quickstart/app/actions/set-llm-key.ts
  function setLlmKeyForChat (line 9) | async function setLlmKeyForChat(chatId: string) {

FILE: evi/evi-next-js-app-router-quickstart/app/api-key/page.tsx
  function ApiKeyPage (line 6) | async function ApiKeyPage() {

FILE: evi/evi-next-js-app-router-quickstart/app/error.tsx
  function Error (line 3) | function Error() {

FILE: evi/evi-next-js-app-router-quickstart/app/layout.tsx
  function RootLayout (line 13) | function RootLayout({

FILE: evi/evi-next-js-app-router-quickstart/app/page.tsx
  function Page (line 7) | async function Page() {

FILE: evi/evi-next-js-app-router-quickstart/app/session-settings/page.tsx
  function SessionSettingsPage (line 8) | async function SessionSettingsPage() {

FILE: evi/evi-next-js-app-router-quickstart/components/Chat.tsx
  type ChatProps (line 12) | type ChatProps = (
  function ClientComponent (line 19) | function ClientComponent({

FILE: evi/evi-next-js-app-router-quickstart/components/ChatLoader.tsx
  type ChatLoaderProps (line 10) | type ChatLoaderProps = (
  function ChatLoader (line 17) | function ChatLoader({

FILE: evi/evi-next-js-app-router-quickstart/components/Controls.tsx
  constant E2E_ENABLED (line 12) | const E2E_ENABLED =
  function Controls (line 16) | function Controls() {

FILE: evi/evi-next-js-app-router-quickstart/components/Expressions.tsx
  function Expressions (line 8) | function Expressions({

FILE: evi/evi-next-js-app-router-quickstart/components/MicFFT.tsx
  function MicFFT (line 7) | function MicFFT({

FILE: evi/evi-next-js-app-router-quickstart/components/StartCall.tsx
  type StartCallProps (line 7) | type StartCallProps = (
  function StartCall (line 14) | function StartCall({

FILE: evi/evi-next-js-app-router-quickstart/components/logos/Hume.tsx
  type HumeLogoProps (line 4) | type HumeLogoProps = SVGAttributes<SVGSVGElement>;
  function HumeLogo (line 6) | function HumeLogo(props: HumeLogoProps) {

FILE: evi/evi-next-js-app-router-quickstart/components/ui/button.tsx
  type ButtonProps (line 36) | interface ButtonProps

FILE: evi/evi-next-js-app-router-quickstart/tests/voice-react.spec.ts
  function waitForChatMetadataFromPage (line 217) | async function waitForChatMetadataFromPage(page: Page, timeoutMs = 30_00...
  function fetchChatEvents (line 235) | async function fetchChatEvents(

FILE: evi/evi-next-js-app-router-quickstart/utils/e2e-hooks.ts
  constant ENABLED (line 1) | const ENABLED =
  type Window (line 6) | interface Window {
  function getWindow (line 13) | function getWindow(): Window | null {
  function recordVoiceEvent (line 20) | function recordVoiceEvent(event: unknown) {
  function trackVoiceStatus (line 28) | function trackVoiceStatus(status: string) {

FILE: evi/evi-next-js-app-router-quickstart/utils/index.ts
  function cn (line 4) | function cn(...inputs: ClassValue[]) {

FILE: evi/evi-next-js-app-router-quickstart/utils/session-settings.ts
  constant E2E_SESSION_SETTINGS (line 3) | const E2E_SESSION_SETTINGS = {

FILE: evi/evi-next-js-function-calling/app/api/fetchWeather/route.ts
  function POST (line 4) | async function POST(request: Request) {

FILE: evi/evi-next-js-function-calling/app/error.tsx
  function Error (line 3) | function Error() {

FILE: evi/evi-next-js-function-calling/app/layout.tsx
  function RootLayout (line 13) | function RootLayout({

FILE: evi/evi-next-js-function-calling/app/page.tsx
  function Page (line 7) | async function Page() {

FILE: evi/evi-next-js-function-calling/components/Chat.tsx
  type ToolMeta (line 9) | type ToolMeta = {
  function ClientComponent (line 59) | function ClientComponent({

FILE: evi/evi-next-js-function-calling/components/ChatLoader.tsx
  function ChatLoader (line 9) | function ChatLoader({ accessToken }: { accessToken: string }) {

FILE: evi/evi-next-js-function-calling/components/Controls.tsx
  function Controls (line 10) | function Controls() {

FILE: evi/evi-next-js-function-calling/components/Expressions.tsx
  function Expressions (line 8) | function Expressions({

FILE: evi/evi-next-js-function-calling/components/MicFFT.tsx
  function MicFFT (line 7) | function MicFFT({

FILE: evi/evi-next-js-function-calling/components/StartCall.tsx
  function StartCall (line 6) | function StartCall({ accessToken }: { accessToken: string }) {

FILE: evi/evi-next-js-function-calling/components/logos/Hume.tsx
  type HumeLogoProps (line 4) | type HumeLogoProps = SVGAttributes<SVGSVGElement>;
  function HumeLogo (line 6) | function HumeLogo(props: HumeLogoProps) {

FILE: evi/evi-next-js-function-calling/components/ui/button.tsx
  type ButtonProps (line 36) | interface ButtonProps

FILE: evi/evi-next-js-function-calling/utils/index.ts
  function cn (line 4) | function cn(...inputs: ClassValue[]) {

FILE: evi/evi-next-js-pages-router-quickstart/components/Chat.tsx
  function ClientComponent (line 9) | function ClientComponent({

FILE: evi/evi-next-js-pages-router-quickstart/components/Controls.tsx
  function Controls (line 10) | function Controls() {

FILE: evi/evi-next-js-pages-router-quickstart/components/Expressions.tsx
  function Expressions (line 8) | function Expressions({

FILE: evi/evi-next-js-pages-router-quickstart/components/MicFFT.tsx
  function MicFFT (line 7) | function MicFFT({

FILE: evi/evi-next-js-pages-router-quickstart/components/StartCall.tsx
  function StartCall (line 6) | function StartCall({ accessToken }: { accessToken: string }) {

FILE: evi/evi-next-js-pages-router-quickstart/components/logos/Hume.tsx
  type HumeLogoProps (line 4) | type HumeLogoProps = SVGAttributes<SVGSVGElement>;
  function HumeLogo (line 6) | function HumeLogo(props: HumeLogoProps) {

FILE: evi/evi-next-js-pages-router-quickstart/components/ui/button.tsx
  type ButtonProps (line 36) | interface ButtonProps

FILE: evi/evi-next-js-pages-router-quickstart/pages/500.tsx
  function ErrorPage (line 1) | function ErrorPage() {

FILE: evi/evi-next-js-pages-router-quickstart/pages/_app.tsx
  function App (line 8) | function App({ Component, pageProps }: AppProps) {

FILE: evi/evi-next-js-pages-router-quickstart/pages/_document.tsx
  function Document (line 4) | function Document() {

FILE: evi/evi-next-js-pages-router-quickstart/pages/api/control-plane/set-llm-key.ts
  function handler (line 6) | async function handler(

FILE: evi/evi-next-js-pages-router-quickstart/pages/index.tsx
  type PageProps (line 33) | type PageProps = InferGetServerSidePropsType<typeof getServerSideProps>;
  function Page (line 35) | function Page({ accessToken }: PageProps) {

FILE: evi/evi-next-js-pages-router-quickstart/utils/index.ts
  function cn (line 4) | function cn(...inputs: ClassValue[]) {

FILE: evi/evi-python-chat-history/main.py
  class EmotionScore (line 12) | class EmotionScore(TypedDict):
  function main (line 16) | async def main() -> None:
  function fetch_all_chat_events (line 44) | async def fetch_all_chat_events(chat_id: str) -> list[ReturnChatEvent]:
  function generate_transcript (line 66) | def generate_transcript(chat_events: list[ReturnChatEvent]) -> str:
  function get_top_emotions (line 88) | def get_top_emotions(chat_events: list[ReturnChatEvent]) -> dict[str, fl...

FILE: evi/evi-python-clm-sse/openai_sse.py
  function stream_messages_from_openai (line 14) | async def stream_messages_from_openai(
  function verify_token (line 39) | async def verify_token(credentials: HTTPAuthorizationCredentials = Secur...
  function root (line 46) | async def root(

FILE: evi/evi-python-clm-wss/main.py
  class ProsodyModel (line 12) | class ProsodyModel(TypedDict):
  class Models (line 16) | class Models(TypedDict):
  class MessageContent (line 20) | class MessageContent(TypedDict):
  class HumeMessage (line 25) | class HumeMessage(TypedDict):
  class MessagesPayload (line 30) | class MessagesPayload(TypedDict):
  class ChatHistoryItem (line 34) | class ChatHistoryItem(TypedDict):
  class Agent (line 38) | class Agent:
    method __init__ (line 49) | def __init__(self):
    method _extract_prosody_scores (line 56) | def _extract_prosody_scores(self, message: HumeMessage) -> ProsodyScores:
    method _get_top_prosody_scores (line 65) | def _get_top_prosody_scores(self, prosody_scores: ProsodyScores, count...
    method _prosody_report (line 69) | def _prosody_report(self, prosody_scores: ProsodyScores) -> str:
    method _count_messages_by_role (line 76) | def _count_messages_by_role(self, chat_history: List[ChatHistoryItem])...
    method parse_hume_payload (line 81) | def parse_hume_payload(self, messages_payload: MessagesPayload) -> Tup...
    method add_prosody_to_utterance (line 120) | def add_prosody_to_utterance(self, content: str, prosody_scores: Proso...
    method _generate_eliza_response (line 126) | def _generate_eliza_response(self) -> str:
    method _should_send_congratulations (line 129) | def _should_send_congratulations(self, user_count: int, assistant_coun...
    method respond (line 132) | def respond(self, message: str, chat_history: List[ChatHistoryItem], l...
  function websocket_endpoint (line 152) | async def websocket_endpoint(websocket: WebSocket):

FILE: evi/evi-python-control-plane/main.py
  function load_config (line 32) | def load_config() -> tuple[str, str]:
  function send_control_message (line 47) | async def send_control_message(client: AsyncHumeClient, chat_id: str, me...
  function observe_chat (line 75) | async def observe_chat(api_key: str, chat_id: str, on_message_callback) ...
  function observer_message_handler (line 123) | async def observer_message_handler(message: dict) -> None:
  function control_plane_demo (line 166) | async def control_plane_demo(
  function main_new_chat (line 241) | async def main_new_chat() -> None:
  function find_active_chat (line 309) | def find_active_chat(client: HumeClient, config_id: str):
  function main_existing_chat (line 326) | async def main_existing_chat() -> None:
  function main (line 350) | async def main() -> None:

FILE: evi/evi-python-function-calling/main.py
  class WebSocketHandler (line 15) | class WebSocketHandler:
    method __init__ (line 18) | def __init__(self):
    method set_socket (line 23) | def set_socket(self, socket: AsyncChatSocketClient):
    method handle_tool_call (line 35) | async def handle_tool_call(self, message: ToolCallMessage) -> Union[To...
    method on_open (line 94) | async def on_open(self):
    method on_message (line 98) | async def on_message(self, message: SubscribeEvent):
    method on_close (line 156) | async def on_close(self):
    method on_error (line 160) | async def on_error(self, error):
  function fetch_weather (line 170) | async def fetch_weather(location: str, format: str) -> str:
  function sending_handler (line 277) | async def sending_handler(socket: AsyncChatSocketClient):
  function main (line 297) | async def main() -> None:

FILE: evi/evi-python-function-calling/utils.py
  function print_prompt (line 3) | def print_prompt(text: str) -> None:
  function extract_top_n_emotions (line 9) | def extract_top_n_emotions(emotion_scores: dict, n: int) -> dict:
  function print_emotion_scores (line 14) | def print_emotion_scores(emotion_scores: dict) -> None:

FILE: evi/evi-python-phone-calling-proxy-server/app.py
  function serve_homepage (line 25) | def serve_homepage():
  function twiml_response (line 30) | def twiml_response():
  function media_stream (line 54) | def media_stream(ws):
  function handle_media_stream (line 63) | async def handle_media_stream(ws):

FILE: evi/evi-python-phone-calling-proxy-server/audio_processors/evi_audio_processor.py
  class AudioProcessingConfig (line 15) | class AudioProcessingConfig:
  class EviAudioProcessor (line 27) | class EviAudioProcessor:
    method __init__ (line 32) | def __init__(
    method postprocess_audio (line 39) | def postprocess_audio(self, evi_audio: bytes) -> bytes:
    method _read_audio (line 57) | def _read_audio(self, evi_audio: bytes) -> tuple[np.ndarray, int]:
    method _ensure_float (line 67) | def _ensure_float(self, audio: np.ndarray) -> np.ndarray:
    method _resample_audio (line 73) | def _resample_audio(self, audio: np.ndarray, original_fs: int, target_...
    method _apply_filters (line 86) | def _apply_filters(self, audio: np.ndarray, fs: int) -> np.ndarray:
    method _high_pass_filter (line 92) | def _high_pass_filter(self, audio: np.ndarray, fs: int) -> np.ndarray:
    method _peak_filter (line 102) | def _peak_filter(self, audio: np.ndarray, fs: int) -> np.ndarray:
    method _notch_filter (line 117) | def _notch_filter(self, audio: np.ndarray, fs: int) -> np.ndarray:
    method _normalize_audio (line 125) | def _normalize_audio(self, audio: np.ndarray) -> np.ndarray:

FILE: evi/evi-python-phone-calling-proxy-server/audio_processors/twilio_audio_processor.py
  class TwilioAudioProcessor (line 12) | class TwilioAudioProcessor:
    method __init__ (line 24) | def __init__(self) -> None:
    method fill_silence (line 29) | def fill_silence(self, current_timestamp: int) -> None:
    method buffer_inbound_audio (line 42) | def buffer_inbound_audio(self, twilio_media_payload: Dict[str, Any]) -...
    method queue_twilio_audio (line 49) | async def queue_twilio_audio(self, twilio_media_payload: Dict[str, Any...

FILE: evi/evi-python-phone-calling-proxy-server/tools.py
  function supportAssistant (line 5) | async def supportAssistant(ticket_id: str) -> str:

FILE: evi/evi-python-quickstart/conftest.py
  function pytest_collection_modifyitems (line 4) | def pytest_collection_modifyitems(config, items):

FILE: evi/evi-python-quickstart/quickstart.py
  function extract_top_n_emotions (line 11) | def extract_top_n_emotions(emotion_scores: dict, n: int) -> dict:
  function print_emotions (line 18) | def print_emotions(emotion_scores: dict) -> None:
  function log (line 22) | def log(text: str) -> None:
  function on_message (line 27) | async def on_message(message: SubscribeEvent, stream: Stream) -> None:
  function main (line 41) | async def main() -> None:

FILE: evi/evi-python-quickstart/test_quickstart.py
  function api_key (line 21) | def api_key():
  function hume_client (line 29) | def hume_client(api_key):
  function hume_client_sync (line 34) | def hume_client_sync(api_key):
  function test_connect_to_evi (line 39) | async def test_connect_to_evi(hume_client):
  function test_session_settings_on_connect (line 84) | async def test_session_settings_on_connect(hume_client, hume_client_sync):
  function test_session_settings_upd_after_connect (line 152) | async def test_session_settings_upd_after_connect(hume_client, hume_clie...

FILE: evi/evi-python-raw-api/src/authenticator.py
  class Authenticator (line 7) | class Authenticator:
    method __init__ (line 17) | def __init__(self, api_key: str, secret_key: str, host: str = "test-ap...
    method fetch_access_token (line 30) | def fetch_access_token(self) -> str:

FILE: evi/evi-python-raw-api/src/connection.py
  class Connection (line 25) | class Connection:
    method connect (line 31) | async def connect(
    method _receive_audio_data (line 84) | async def _receive_audio_data(cls, socket):
    method _read_audio_stream_non_blocking (line 122) | async def _read_audio_stream_non_blocking(cls, audio_stream, chunk_size):
    method _send_audio_data (line 140) | async def _send_audio_data(

FILE: evi/evi-python-raw-api/src/devices.py
  class AudioDevices (line 6) | class AudioDevices:
    method list_audio_devices (line 12) | def list_audio_devices(
    method choose_device (line 46) | def choose_device(cls, devices, device_type="input"):

FILE: evi/evi-python-raw-api/src/main.py
  function main (line 19) | async def main():
  function get_access_token (line 74) | def get_access_token() -> str:

FILE: evi/evi-python-webhooks/app.py
  function hume_webhook_handler (line 25) | async def hume_webhook_handler(request: Request, event: WebhookEvent):

FILE: evi/evi-python-webhooks/utils.py
  function fetch_all_chat_events (line 14) | async def fetch_all_chat_events(client: AsyncHumeClient, chat_id: str) -...
  function construct_transcript (line 23) | def construct_transcript(chat_events: list[ReturnChatEvent]) -> str:
  function save_transcript_to_file (line 38) | def save_transcript_to_file(transcript: str, chat_id: str) -> None:
  function get_chat_transcript (line 46) | async def get_chat_transcript(client: AsyncHumeClient, chat_id: str) -> ...
  function validate_webhook_headers (line 53) | def validate_webhook_headers(payload: str, headers: Headers) -> None:
  function fetch_weather (line 101) | async def fetch_weather(parameters: str) -> str:
  function fetch_weather_tool (line 183) | async def fetch_weather_tool(

FILE: evi/evi-python-wss-clm-endpoint/agent.py
  function reflect (line 323) | def reflect(fragment):
  function eliza_response (line 334) | def eliza_response(user_input):

FILE: evi/evi-python-wss-clm-endpoint/app.py
  function root (line 12) | async def root():
  function websocket_handler (line 17) | async def websocket_handler(websocket: WebSocket) -> None:

FILE: evi/evi-python-wss-clm-endpoint/cdk/cdk.out/asset.689e46f5ffafa1e0f81f114b5dfd7694d2d1e291d9bd855e4f7b601d2b2403d0/agent.py
  function reflect (line 323) | def reflect(fragment):
  function eliza_response (line 334) | def eliza_response(user_input):

FILE: evi/evi-python-wss-clm-endpoint/cdk/cdk.out/asset.689e46f5ffafa1e0f81f114b5dfd7694d2d1e291d9bd855e4f7b601d2b2403d0/app.py
  function root (line 10) | async def root():
  function websocket_handler (line 15) | async def websocket_handler(websocket: WebSocket) -> None:

FILE: evi/evi-python-wss-clm-endpoint/cdk/cdk.out/asset.689e46f5ffafa1e0f81f114b5dfd7694d2d1e291d9bd855e4f7b601d2b2403d0/modal/modal_app.py
  function endpoint (line 14) | def endpoint():

FILE: evi/evi-python-wss-clm-endpoint/cdk/cdk.out/asset.ee7de53d64cc9d6248fa6aa550f92358f6c907b5efd6f3298aeab1b5e7ea358a/__entrypoint__.js
  function handler (line 1) | async function handler(event,context){const sanitizedEvent={...event,Res...
  function renderResponse (line 1) | function renderResponse(cfnRequest,handlerResponse={}){const physicalRes...
  function submitResponse (line 1) | async function submitResponse(status,event){const json={Status:status,Re...
  function defaultSendHttpRequest (line 1) | async function defaultSendHttpRequest(options,requestBody){return new Pr...
  function defaultLog (line 1) | function defaultLog(fmt,...params){console.log(fmt,...params)}
  function withRetries (line 1) | function withRetries(options,fn){return async(...xs)=>{let attempts=opti...
  function sleep (line 1) | async function sleep(ms){return new Promise(ok=>setTimeout(ok,ms))}

FILE: evi/evi-python-wss-clm-endpoint/cdk/cdk.out/asset.ee7de53d64cc9d6248fa6aa550f92358f6c907b5efd6f3298aeab1b5e7ea358a/index.js
  function c (line 1) | function c(r,e){return{GroupId:r,IpPermissions:[{UserIdGroupPairs:[{Grou...
  function d (line 1) | function d(r){return{GroupId:r,IpPermissions:[{IpRanges:[{CidrIp:"0.0.0....
  function f (line 1) | async function f(r){let e=r.ResourceProperties.DefaultSecurityGroupId,o=...
  function h (line 1) | async function h(r){let e=r.OldResourceProperties.DefaultSecurityGroupId...
  function p (line 1) | async function p(r,e){try{await u.revokeSecurityGroupEgress(d(r))}catch(...
  function m (line 1) | async function m(r,e){await u.authorizeSecurityGroupIngress(c(r,e)),awai...

FILE: evi/evi-python-wss-clm-endpoint/cdk/cdk.out/asset.f372550efb96be7f549f1d0346e8646080c1fe2b15c7c2e3b1dceb07b6656c54/agent.py
  function reflect (line 323) | def reflect(fragment):
  function eliza_response (line 334) | def eliza_response(user_input):

FILE: evi/evi-python-wss-clm-endpoint/cdk/cdk.out/asset.f372550efb96be7f549f1d0346e8646080c1fe2b15c7c2e3b1dceb07b6656c54/app.py
  function root (line 9) | async def root():
  function websocket_handler (line 13) | async def websocket_handler(websocket: WebSocket) -> None:

FILE: evi/evi-python-wss-clm-endpoint/cdk/cdk.out/asset.f372550efb96be7f549f1d0346e8646080c1fe2b15c7c2e3b1dceb07b6656c54/modal_app.py
  function endpoint (line 14) | def endpoint():

FILE: evi/evi-python-wss-clm-endpoint/cdk/cdk/eliza_stack.py
  class ElizaStack (line 9) | class ElizaStack(cdk.Stack):
    method __init__ (line 11) | def __init__(self, scope: cdk.App, id: str, **kwargs) -> None:

FILE: evi/evi-python-wss-clm-endpoint/modal/modal_app.py
  function endpoint (line 14) | def endpoint():

FILE: evi/evi-python-wss-clm-endpoint/modal_app.py
  function endpoint (line 14) | def endpoint():

FILE: evi/evi-react-native/App.tsx
  type ChatEntry (line 27) | interface ChatEntry {

FILE: evi/evi-react-native/VoiceIsolationModePrompt.tsx
  type VoiceIsolationModePromptProps (line 12) | interface VoiceIsolationModePromptProps {

FILE: evi/evi-react-native/modules/audio/src/AudioModule.ts
  class AudioModule (line 5) | class AudioModule extends NativeModule<AudioModuleEvents> {

FILE: evi/evi-react-native/modules/audio/src/AudioModule.types.ts
  type MicrophoneMode (line 1) | type MicrophoneMode = "N/A" | "Standard" | "Voice Isolation" | "Wide Spe...
  type AudioModuleEvents (line 3) | type AudioModuleEvents = {
  type AudioEventPayload (line 8) | type AudioEventPayload = {

FILE: evi/evi-react-native/modules/audio/src/AudioModule.web.ts
  method getPermissions (line 27) | async getPermissions(): Promise<boolean> {
  method startRecording (line 34) | async startRecording(): Promise<void> {
  method stopRecording (line 55) | async stopRecording(): Promise<void> {
  method enqueueAudio (line 64) | async enqueueAudio(base64EncodedAudio: string): Promise<void> {
  method mute (line 68) | async mute(): Promise<void> {
  method unmute (line 72) | async unmute(): Promise<void> {
  method stopPlayback (line 76) | async stopPlayback(): Promise<void> {
  method addListener (line 84) | async addListener(eventName: keyof AudioModuleEvents, f: AudioModuleEven...
  method showMicrophoneModes (line 89) | async showMicrophoneModes(): Promise<void> {
  method getMicrophoneMode (line 94) | async getMicrophoneMode(): Promise<MicrophoneMode> {

FILE: evi/evi-swift-chat/access_token_service/run_token_service.py
  function get_access_token (line 10) | def get_access_token():

FILE: evi/evi-touchdesigner/Scripts/HumeTD.py
  class HumeTDExt (line 3) | class HumeTDExt:
    method __init__ (line 4) | def __init__(self, ownerComp):
    method Send_user_input (line 7) | def Send_user_input(self, user_input: str):

FILE: evi/evi-touchdesigner/Scripts/MessagePlaback.py
  class MessagePlaybackExt (line 7) | class MessagePlaybackExt:
    method __init__ (line 8) | def __init__(self, owner_comp):
    method Handle_ws_msg (line 18) | def Handle_ws_msg(self, msg):
    method check_messages (line 24) | def check_messages(self):
    method Add_item (line 28) | def Add_item(self, audio_data):
    method Remove_item (line 35) | def Remove_item(self):
    method play_next_item (line 44) | def play_next_item(self):
    method get_audio_duration (line 55) | def get_audio_duration(self, filepath):

FILE: evi/evi-typescript-chat-history/src/index.ts
  function main (line 17) | async function main(): Promise<void> {
  function fetchAllChatEvents (line 57) | async function fetchAllChatEvents(chatId: string): Promise<Hume.empathic...
  function generateTranscript (line 89) | function generateTranscript(chatEvents: Hume.empathicVoice.ReturnChatEve...
  function getTopEmotions (line 118) | function getTopEmotions(chatEvents: Hume.empathicVoice.ReturnChatEvent[]...

FILE: evi/evi-typescript-function-calling/src/handleToolCall.ts
  function fetchWeather (line 6) | async function fetchWeather(location: string, format: string): Promise<s...
  function handleToolCallMessage (line 38) | async function handleToolCallMessage(

FILE: evi/evi-typescript-function-calling/src/main.ts
  function connect (line 74) | async function connect(): Promise<void> {
  function disconnect (line 101) | function disconnect(): void {
  function captureAudio (line 134) | async function captureAudio(): Promise<void> {
  function handleWebSocketOpenEvent (line 168) | async function handleWebSocketOpenEvent(): Promise<void> {
  function handleWebSocketMessageEvent (line 192) | async function handleWebSocketMessageEvent(
  function handleWebSocketErrorEvent (line 236) | function handleWebSocketErrorEvent(error: Error): void {
  function handleWebSocketCloseEvent (line 244) | async function handleWebSocketCloseEvent(): Promise<void> {
  function appendMessage (line 260) | function appendMessage(
  function toggleBtnStates (line 283) | function toggleBtnStates(): void {
  function extractTopThreeEmotions (line 292) | function extractTopThreeEmotions(
  type Score (line 322) | interface Score {
  type ChatMessage (line 327) | interface ChatMessage {
  class ChatCard (line 334) | class ChatCard {
    method constructor (line 337) | constructor(message: ChatMessage) {
    method createScoreItem (line 341) | private createScoreItem(score: Score): HTMLElement {
    method render (line 348) | public render(): HTMLElement {

FILE: evi/evi-typescript-proxy/app/api.ts
  class Api (line 4) | class Api {
    method broadcastState (line 9) | broadcastState(state: State): void {
    method getNextAPIEvent (line 17) | getNextAPIEvent(): AppEvent | undefined {
    method hasAPIEvents (line 22) | hasAPIEvents(): boolean {
    method handleRequest (line 27) | handleRequest(
    method handlePostAppEvent (line 46) | private handlePostAppEvent(
    method handleSubscribeAppEvent (line 70) | private handleSubscribeAppEvent(

FILE: evi/evi-typescript-proxy/app/cli.ts
  class CLI (line 15) | class CLI {
    method constructor (line 22) | constructor(PORT: number, WS_PATH: string) {
    method setState (line 27) | setState(state: State): void {
    method getNextCLIEvent (line 32) | getNextCLIEvent(): AppEvent | undefined {
    method menu (line 36) | private async menu(
    method playbackMenu (line 63) | private async playbackMenu(
    method errorSimulationMenu (line 87) | private async errorSimulationMenu(
    method getNextEvent (line 128) | async getNextEvent(): Promise<AppEvent> {
    method getNextEvent_ (line 142) | private async getNextEvent_(): Promise<AppEvent> {
    method maybePromptUserIfNeeded (line 301) | async maybePromptUserIfNeeded() {
    method runPromptLoop (line 308) | async runPromptLoop() {

FILE: evi/evi-typescript-proxy/app/downstream.ts
  type InferRaw (line 5) | type InferRaw<T> = T extends { parse: (raw: infer R) => any } ? R : never;
  type WebSocketErrorRaw (line 6) | type WebSocketErrorRaw = InferRaw<typeof serialization.empathicVoice.Web...
  class Downstream (line 14) | class Downstream extends EventEmitter {
    method constructor (line 18) | private constructor(
    method connect (line 25) | static connect({
    method broadcast (line 46) | broadcast(message: Message) {
    method close (line 55) | close() {
    method closeWithError (line 62) | closeWithError(code: number, reason: string) {
    method sendError (line 74) | sendError(error: WebSocketErrorRaw) {
    method logMessage (line 85) | private logMessage(message: { toString: () => string }) {
    method handleConnection (line 109) | handleConnection(ws: WebSocket): void {

FILE: evi/evi-typescript-proxy/app/main.ts
  constant DIST_DIR (line 21) | const DIST_DIR = path.join(__dirname, "../web/dist");
  constant PORT (line 23) | const PORT = 3000;
  constant DOWNSTREAM_WS_PATH (line 24) | const DOWNSTREAM_WS_PATH = "/v0/evi/chat";
  constant UPSTREAM_WS_BASE_URL (line 25) | const UPSTREAM_WS_BASE_URL = "wss://api.hume.ai";
  function shallowEqual (line 272) | function shallowEqual(a: any, b: any): boolean {
  function main (line 284) | async function main() {

FILE: evi/evi-typescript-proxy/app/upstream.ts
  method onMessage (line 13) | public onMessage(handler: (message: Message) => void): void {
  method onConnect (line 17) | public onConnect(handler: () => void): void {
  method onDisconnect (line 21) | public onDisconnect(handler: () => void): void {
  type ConnectArgs (line 26) | type ConnectArgs = {
  class LiveUpstream (line 31) | class LiveUpstream extends BaseUpstream {
    method connect (line 36) | public connect(args: ConnectArgs): void {
    method close (line 82) | public close(): void {
    method send (line 88) | public send(message: WSMessage): void {
  class PlaybackUpstream (line 99) | class PlaybackUpstream extends BaseUpstream {
    method setPlaybackMessages (line 104) | public setPlaybackMessages(messages: Message[]): void {
    method connect (line 109) | public connect(_args?: any): void {
    method close (line 115) | public close(): void {
    method send (line 121) | public send(_message: WSMessage): void {
  class UninitializedUpstream (line 134) | class UninitializedUpstream extends BaseUpstream {
    method connect (line 135) | public connect(_args?: any): void {
    method close (line 141) | public close(): void {
    method send (line 147) | public send(_message: WSMessage): void {

FILE: evi/evi-typescript-proxy/web/ChatControls.tsx
  function ChatControls (line 4) | function ChatControls() {

FILE: evi/evi-typescript-proxy/web/EVIChat.tsx
  function EVIChat (line 25) | function EVIChat({ accessToken }: { accessToken?: string }) {

FILE: evi/evi-typescript-proxy/web/StartCall.tsx
  function StartCall (line 5) | function StartCall() {

FILE: evi/evi-typescript-proxy/web/app.tsx
  function App (line 6) | function App() {

FILE: evi/evi-typescript-proxy/web/useProxyState.ts
  function useProxyState (line 4) | function useProxyState() {

FILE: evi/evi-typescript-quickstart/src/lib/audio.ts
  function startAudioCapture (line 30) | async function startAudioCapture(

FILE: evi/evi-typescript-quickstart/src/lib/evi.test.ts
  function waitForSocketOpen (line 284) | function waitForSocketOpen(socket: any): Promise<void> {
  function waitForChatMetadata (line 306) | function waitForChatMetadata(getSocket: () => any): Promise<string> {
  function fetchChatEvents (line 348) | async function fetchChatEvents(

FILE: evi/evi-typescript-quickstart/src/lib/evi.ts
  function getClient (line 6) | function getClient(apiKey: string): HumeClient {
  function connectEVI (line 33) | function connectEVI(

FILE: evi/evi-typescript-quickstart/src/lib/ui.ts
  function extractTopThreeEmotions (line 16) | function extractTopThreeEmotions(
  function appendChatMessage (line 38) | function appendChatMessage(

FILE: evi/evi-typescript-quickstart/src/main.ts
  function setConnected (line 72) | function setConnected(on: boolean): void {
  function handleOpen (line 78) | async function handleOpen() {
  function handleMessage (line 84) | async function handleMessage(msg: Hume.empathicVoice.chat.SubscribeEvent) {
  function handleError (line 111) | function handleError(err: Event | Error) {
  function handleClose (line 115) | function handleClose(e: unknown) {
  function connect (line 120) | function connect(
  function disconnect (line 142) | function disconnect() {

FILE: evi/evi-typescript-webhooks/src/main.ts
  constant PORT (line 15) | const PORT = 5000;

FILE: evi/evi-typescript-webhooks/src/util.ts
  function getWebhookSigningKey (line 14) | function getWebhookSigningKey(): string {
  function fetchAllChatEvents (line 29) | async function fetchAllChatEvents(client: HumeClient, chatId: string): P...
  function generateTranscript (line 46) | function generateTranscript(chatEvents: Hume.empathicVoice.ReturnChatEve...
  function saveTranscriptToFile (line 63) | async function saveTranscriptToFile(transcript: string, chatId: string):...
  function getChatTranscript (line 82) | async function getChatTranscript(client: HumeClient, chatId: string): Pr...
  function validateWebhookHeaders (line 96) | function validateWebhookHeaders(

FILE: evi/evi-unity-quickstart/Assets/Editor/AutoLoadDefaultScene.cs
  class AutoLoadDefaultScene (line 5) | [InitializeOnLoad]
    method AutoLoadDefaultScene (line 10) | static AutoLoadDefaultScene()
    method LoadDefaultSceneIfNeeded (line 15) | private static void LoadDefaultSceneIfNeeded()

FILE: evi/evi-unity-quickstart/Assets/Scripts/HumeEVI.cs
  class HumeEVI (line 8) | [RequireComponent(typeof(AudioSource))]
    type ConversationState (line 41) | public enum ConversationState
    method SetApiKey (line 65) | public void SetApiKey(string key)
    method Update (line 70) | void Update()
    method OnDestroy (line 86) | void OnDestroy()
    method StartConversation (line 91) | public async void StartConversation()
    method StopConversation (line 131) | public async void StopConversation()
    method ConnectToEVI (line 167) | private async Task ConnectToEVI()
    method SubscribeToEvents (line 204) | private void SubscribeToEvents()
    method ProcessAudioOutput (line 258) | private void ProcessAudioOutput(string base64Audio)
    method PlayNextAudioChunk (line 284) | private void PlayNextAudioChunk()
    method WaitForPlaybackComplete (line 330) | private System.Collections.IEnumerator WaitForPlaybackComplete(float d...
    method StartMicrophoneCapture (line 344) | private void StartMicrophoneCapture()
    method StopMicrophoneCapture (line 368) | private void StopMicrophoneCapture()
    method SendMicrophoneAudio (line 385) | private async void SendMicrophoneAudio()
    method ConvertWavToFloats (line 461) | private float[] ConvertWavToFloats(byte[] wavBytes)
    method ConvertS16LEToFloats (line 554) | private float[] ConvertS16LEToFloats(byte[] bytes)
    method ConvertU8ToFloats (line 570) | private float[] ConvertU8ToFloats(byte[] bytes)
    method ConvertFloatsToPCM (line 585) | private byte[] ConvertFloatsToPCM(float[] samples)
  class UnityMainThreadDispatcher (line 612) | public class UnityMainThreadDispatcher : MonoBehaviour
    method Awake (line 617) | void Awake()
    method Update (line 630) | void Update()
    method Enqueue (line 641) | public static void Enqueue(Action action)
    method Initialize (line 651) | [RuntimeInitializeOnLoadMethod(RuntimeInitializeLoadType.BeforeSceneLo...

FILE: evi/evi-unity-quickstart/Assets/Scripts/SceneBuilder.cs
  class SceneBuilder (line 3) | public class SceneBuilder : MonoBehaviour
    method Awake (line 8) | void Awake()
    method Start (line 13) | void Start()
    method BuildScene (line 18) | void BuildScene()
  class ConversationVisualFeedback (line 78) | public class ConversationVisualFeedback : MonoBehaviour
    method Initialize (line 106) | public void Initialize(HumeEVI eviComponent, Renderer renderer)
    method OnDestroy (line 118) | void OnDestroy()
    method Update (line 128) | void Update()
    method AnimateColor (line 139) | private void AnimateColor()
    method GetBaseColorForState (line 157) | private Color GetBaseColorForState(HumeEVI.ConversationState state)
    method OnStateChanged (line 172) | private void OnStateChanged(HumeEVI.ConversationState newState)
    method UpdateInstructionText (line 180) | private void UpdateInstructionText()
    method OnUserTranscript (line 205) | private void OnUserTranscript(string transcript)
    method OnAssistantMessage (line 211) | private void OnAssistantMessage(string message)
    method UpdateTranscriptDisplay (line 217) | private void UpdateTranscriptDisplay()
  class ClickToConverse (line 247) | public class ClickToConverse : MonoBehaviour
    method Start (line 251) | void Start()
    method OnMouseDown (line 256) | void OnMouseDown()

FILE: expression-measurement/batch/next-js-emotional-language/src/lib/mutations/processTextFile.ts
  function processTextFile (line 10) | async function processTextFile(fileUrl: string) {
  function sendFile (line 36) | async function sendFile(fileUrl: string) {
  function pollForResultsUrl (line 52) | async function pollForResultsUrl(jobId: string, maxAttempts: number) {
  function fetchResultsFile (line 85) | async function fetchResultsFile(url: string) {

FILE: expression-measurement/batch/next-js-emotional-language/src/lib/utils.ts
  function cn (line 4) | function cn(...inputs: ClassValue[]) {

FILE: expression-measurement/batch/next-js-emotional-language/src/pages/_app.tsx
  function App (line 18) | function App({ Component, pageProps }: AppProps) {

FILE: expression-measurement/batch/next-js-emotional-language/src/pages/_document.tsx
  function Document (line 3) | function Document() {

FILE: expression-measurement/batch/next-js-emotional-language/src/pages/api/results.ts
  function handler (line 8) | async function handler(

FILE: expression-measurement/batch/next-js-emotional-language/src/pages/api/send.ts
  type Data (line 7) | type Data = {
  function handler (line 11) | async function handler(

FILE: expression-measurement/batch/next-js-emotional-language/src/pages/index.tsx
  function Home (line 19) | function Home() {

FILE: expression-measurement/batch/python-top-emotions/top_emotions.py
  function main (line 10) | async def main():
  function poll_for_completion (line 55) | async def poll_for_completion(client: AsyncHumeClient, job_id, timeout=1...
  function poll_until_complete (line 68) | async def poll_until_complete(client: AsyncHumeClient, job_id):
  function process_predictions (line 121) | def process_predictions(job_predictions: List[UnionPredictResult], start...

FILE: expression-measurement/streaming/next-js-streaming-example/components/inputs/Button.tsx
  type ButtonProps (line 4) | type ButtonProps = React.HTMLAttributes<HTMLDivElement> & {
  function Button (line 11) | function Button({ className, variant, text, onClick, tooltip }: ButtonPr...

FILE: expression-measurement/streaming/next-js-streaming-example/components/inputs/TextArea.tsx
  type TextAreaProps (line 5) | type TextAreaProps = {
  function TextArea (line 14) | function TextArea({ className, inputClassName, text, placeholder, onChan...

FILE: expression-measurement/streaming/next-js-streaming-example/components/inputs/TextBox.tsx
  type TextBoxProps (line 4) | type TextBoxProps = {
  function TextBox (line 15) | function TextBox({

FILE: expression-measurement/streaming/next-js-streaming-example/components/menu/Auth.tsx
  type ChildElement (line 7) | type ChildElement = JSX.Element | string;
  type AuthState (line 16) | type AuthState = {
  type AuthProps (line 23) | type AuthProps = {
  function Auth (line 27) | function Auth({ children }: AuthProps) {

FILE: expression-measurement/streaming/next-js-streaming-example/components/menu/Login.tsx
  type LoginProps (line 6) | type LoginProps = {
  function Login (line 10) | function Login({ authenticate }: LoginProps) {

FILE: expression-measurement/streaming/next-js-streaming-example/components/menu/Nav.tsx
  function Nav (line 3) | function Nav() {
  type NavItemProps (line 25) | type NavItemProps = {
  function NavItem (line 30) | function NavItem({ route, name }: NavItemProps) {

FILE: expression-measurement/streaming/next-js-streaming-example/components/menu/Toolbar.tsx
  function Toolbar (line 5) | function Toolbar() {

FILE: expression-measurement/streaming/next-js-streaming-example/components/widgets/AudioWidgets.tsx
  type AudioWidgetsProps (line 12) | interface AudioWidgetsProps {
  function AudioWidgets (line 19) | function AudioWidgets({ modelName, recordingLengthMs, streamWindowLength...

FILE: expression-measurement/streaming/next-js-streaming-example/components/widgets/BurstWidgets.tsx
  type BurstWidgetsProps (line 6) | type BurstWidgetsProps = {
  function BurstWidgets (line 10) | function BurstWidgets({ onTimeline }: BurstWidgetsProps) {

FILE: expression-measurement/streaming/next-js-streaming-example/components/widgets/Descriptor.tsx
  type DescriptorProps (line 6) | type DescriptorProps = {
  function Descriptor (line 11) | function Descriptor({ className, emotions }: DescriptorProps) {

FILE: expression-measurement/streaming/next-js-streaming-example/components/widgets/DiscreteTimeline.tsx
  type DiscreteTimelineProps (line 6) | type DiscreteTimelineProps = {
  function DiscreteTimeline (line 11) | function DiscreteTimeline({ className, predictions }: DiscreteTimelinePr...
  type DetectionProps (line 99) | type DetectionProps = {
  function Detection (line 104) | function Detection({ className, detection }: DetectionProps) {

FILE: expression-measurement/streaming/next-js-streaming-example/components/widgets/FaceTrackedVideo.tsx
  type FaceTrackedVideoProps (line 5) | type FaceTrackedVideoProps = {
  function FaceTrackedVideo (line 13) | function FaceTrackedVideo({ className, trackedFaces, onVideoReady, width...

FILE: expression-measurement/streaming/next-js-streaming-example/components/widgets/FaceWidgets.tsx
  type FaceWidgetsProps (line 16) | type FaceWidgetsProps = {
  function FaceWidgets (line 20) | function FaceWidgets({ onCalibrate }: FaceWidgetsProps) {

FILE: expression-measurement/streaming/next-js-streaming-example/components/widgets/LanguageWidgets.tsx
  function LanguageWidgets (line 10) | function LanguageWidgets() {

FILE: expression-measurement/streaming/next-js-streaming-example/components/widgets/Loader.tsx
  type LoaderProps (line 5) | type LoaderProps = {
  function Loader (line 12) | function Loader({ className, emotions, emotionName, numLevels }: LoaderP...

FILE: expression-measurement/streaming/next-js-streaming-example/components/widgets/LoaderSet.tsx
  type LoaderProps (line 5) | type LoaderProps = {
  function LoaderSet (line 12) | function LoaderSet({ className, emotions, emotionNames, numLevels }: Loa...

FILE: expression-measurement/streaming/next-js-streaming-example/components/widgets/ProsodyWidgets.tsx
  function ProsodyWidgets (line 3) | function ProsodyWidgets() {

FILE: expression-measurement/streaming/next-js-streaming-example/components/widgets/TopEmotions.tsx
  type TopEmotionsProps (line 3) | type TopEmotionsProps = {
  function TopEmotions (line 9) | function TopEmotions({ className, emotions, numEmotions }: TopEmotionsPr...

FILE: expression-measurement/streaming/next-js-streaming-example/lib/data/audioPrediction.ts
  type AudioPrediction (line 4) | type AudioPrediction = {

FILE: expression-measurement/streaming/next-js-streaming-example/lib/data/boundingBox.ts
  type BoundingBox (line 1) | type BoundingBox = {

FILE: expression-measurement/streaming/next-js-streaming-example/lib/data/characterRange.ts
  type CharacterRange (line 1) | type CharacterRange = {

FILE: expression-measurement/streaming/next-js-streaming-example/lib/data/embedding.ts
  type Embedding (line 1) | type Embedding = number[];

FILE: expression-measurement/streaming/next-js-streaming-example/lib/data/emotion.ts
  type Emotion (line 1) | type Emotion = {
  type EmotionName (line 6) | type EmotionName =

FILE: expression-measurement/streaming/next-js-streaming-example/lib/data/facePrediction.ts
  type FacePrediction (line 4) | type FacePrediction = {

FILE: expression-measurement/streaming/next-js-streaming-example/lib/data/languagePrediction.ts
  type LanguagePrediction (line 4) | type LanguagePrediction = {

FILE: expression-measurement/streaming/next-js-streaming-example/lib/data/range.ts
  type Range (line 1) | type Range = {

FILE: expression-measurement/streaming/next-js-streaming-example/lib/data/timeRange.ts
  type TimeRange (line 1) | type TimeRange = {

FILE: expression-measurement/streaming/next-js-streaming-example/lib/data/trackedFace.ts
  type TrackedFace (line 3) | type TrackedFace = {

FILE: expression-measurement/streaming/next-js-streaming-example/lib/hooks/keyPress.ts
  function useKeypress (line 3) | function useKeypress(key: string, callback: () => void, deps: Dependency...

FILE: expression-measurement/streaming/next-js-streaming-example/lib/hooks/stability.ts
  function useStableEmotions (line 6) | function useStableEmotions(emotions: Emotion[], embeddingDistThreshold: ...

FILE: expression-measurement/streaming/next-js-streaming-example/lib/hooks/storage.ts
  function useStorage (line 5) | function useStorage(key: string) {

FILE: expression-measurement/streaming/next-js-streaming-example/lib/media/audioRecorder.ts
  class AudioRecorder (line 3) | class AudioRecorder {
    method constructor (line 7) | private constructor(recorder: MediaRecorder, mediaStream: MediaStream) {
    method create (line 12) | static async create(): Promise<AudioRecorder> {
    method stopRecording (line 19) | async stopRecording() {
    method record (line 25) | record(length: number): Promise<Blob> {

FILE: expression-measurement/streaming/next-js-streaming-example/lib/media/videoRecorder.ts
  type Size (line 3) | type Size = {
  class VideoRecorder (line 8) | class VideoRecorder {
    method constructor (line 14) | private constructor(
    method create (line 27) | static async create(videoElement: HTMLVideoElement, photoElement: HTML...
    method stopRecording (line 38) | async stopRecording() {
    method setVideoSize (line 44) | private static setVideoSize(videoElement: HTMLVideoElement, photoEleme...
    method takePhoto (line 64) | async takePhoto(format: string = "image/png"): Promise<Blob> {

FILE: expression-measurement/streaming/next-js-streaming-example/lib/utilities/asyncUtilities.ts
  function sleep (line 1) | function sleep(delay: number) {

FILE: expression-measurement/streaming/next-js-streaming-example/lib/utilities/blobUtilities.ts
  function blobToBase64 (line 1) | function blobToBase64(blob: Blob) {
  function canvasToImageBlob (line 14) | function canvasToImageBlob(canvas: HTMLCanvasElement, format: string = "...

FILE: expression-measurement/streaming/next-js-streaming-example/lib/utilities/embeddingUtilities.ts
  function emotionsToEmbedding (line 6) | function emotionsToEmbedding(emotions: Emotion[]): Embedding {
  function emotionDist (line 21) | function emotionDist(emotionsA: Emotion[], emotionsB: Emotion[]): number {
  function emotionsToScoreMap (line 25) | function emotionsToScoreMap(emotions: Emotion[]): Map<EmotionName, numbe...
  function embeddingDist (line 34) | function embeddingDist(embeddingA: Embedding, embeddingB: Embedding): nu...

FILE: expression-measurement/streaming/next-js-streaming-example/lib/utilities/emotionUtilities.ts
  type EmotionInfo (line 5) | type EmotionInfo = {
  constant CANONICAL_EMOTION_NAMES (line 10) | const CANONICAL_EMOTION_NAMES: EmotionName[] = [
  constant DESCRIPTOR_MAP (line 61) | const DESCRIPTOR_MAP: Map<EmotionName, Optional<string>> = new Map([
  function getEmotionDescriptor (line 112) | function getEmotionDescriptor(name: EmotionName): Optional<string> {

FILE: expression-measurement/streaming/next-js-streaming-example/lib/utilities/environmentUtilities.ts
  type Environment (line 9) | enum Environment {
  function parseEnvironment (line 13) | function parseEnvironment(env: string): Environment {
  function getApiUrlHttp (line 17) | function getApiUrlHttp(environment: Environment): string {
  function getApiUrlWs (line 21) | function getApiUrlWs(environment: Environment): string {

FILE: expression-measurement/streaming/next-js-streaming-example/lib/utilities/scalingUtilities.ts
  function scaleEmotionsToRanges (line 5) | function scaleEmotionsToRanges(emotions: Emotion[]): Emotion[] {
  function scale (line 24) | function scale(value: number, range: Range) {
  function clip (line 29) | function clip(value: number, range: Range) {
  constant RANGE_MAP (line 33) | const RANGE_MAP = new Map<EmotionName, Range>([

FILE: expression-measurement/streaming/next-js-streaming-example/lib/utilities/styleUtilities.ts
  function cn (line 5) | function cn(...inputs: ClassValue[]) {

FILE: expression-measurement/streaming/next-js-streaming-example/lib/utilities/typeUtilities.ts
  type Optional (line 1) | type Optional<T> = T | undefined;

FILE: expression-measurement/streaming/next-js-streaming-example/pages/_app.tsx
  function App (line 14) | function App({ Component, pageProps }: AppProps) {

FILE: expression-measurement/streaming/next-js-streaming-example/pages/burst/index.tsx
  function BurstPage (line 3) | function BurstPage() {

FILE: expression-measurement/streaming/next-js-streaming-example/pages/burst/timeline/index.tsx
  function BurstTimelinePage (line 4) | function BurstTimelinePage() {

FILE: expression-measurement/streaming/next-js-streaming-example/pages/face/calibrate/index.tsx
  function FaceCalibratePage (line 7) | function FaceCalibratePage() {

FILE: expression-measurement/streaming/next-js-streaming-example/pages/face/index.tsx
  function FacePage (line 3) | function FacePage() {

FILE: expression-measurement/streaming/next-js-streaming-example/pages/index.tsx
  function HomePage (line 10) | function HomePage() {
  type ModelSectionProps (line 28) | type ModelSectionProps = {
  function ModelSection (line 34) | function ModelSection(props: ModelSectionProps) {

FILE: expression-measurement/streaming/next-js-streaming-example/pages/language/index.tsx
  function LanguagePage (line 3) | function LanguagePage() {

FILE: expression-measurement/streaming/next-js-streaming-example/pages/prosody/index.tsx
  function ProsodyPage (line 3) | function ProsodyPage() {

FILE: expression-measurement/streaming/python-streaming-example/main.py
  class Result (line 27) | class Result(TypedDict):
  function process_emotion_scores (line 31) | def process_emotion_scores(event: StreamModelPredictions) -> List[Result]:
  function print_emotion_summary (line 46) | def print_emotion_summary(result: Result) -> None:
  function streaming_example (line 57) | async def streaming_example() -> None:
  function main (line 93) | def main():

FILE: expression-measurement/streaming/python-streaming-example/test_main.py
  function api_key (line 18) | def api_key():
  function test_send_hello_world_returns_non_empty_emotion_analysis (line 26) | async def test_send_hello_world_returns_non_empty_emotion_analysis(api_k...

FILE: tts/tts-dotnet-quickstart/Program.cs
  class Program (line 16) | class Program
    method RunExamplesAsync (line 47) | static async Task RunExamplesAsync()
    method Main (line 69) | static async Task Main(string[] args)
    method Example1Async (line 81) | static async Task Example1Async()
    method Example2Async (line 101) | static async Task Example2Async()
    method Example3Async (line 194) | static async Task Example3Async()
    method StreamAudioToPlayerAsync (line 248) | private static async Task StreamAudioToPlayerAsync<T>(
    class SilenceFiller (line 267) | public class SilenceFiller : IDisposable
      method SilenceFiller (line 279) | public SilenceFiller(Stream outputStream)
      method RunFillerLoop (line 285) | private async Task RunFillerLoop()
      method WriteAudio (line 321) | public void WriteAudio(byte[] audioBytes)
      method EndStreamAsync (line 329) | public async Task EndStreamAsync()
      method Dispose (line 335) | public void Dispose()
    class StreamingAudioPlayer (line 346) | public class StreamingAudioPlayer : IDisposable
      method StreamingAudioPlayer (line 358) | public StreamingAudioPlayer(bool usePcmFormat = false)
      method StartAsync (line 368) | public Task StartAsync()
      method WriteAudio (line 375) | public void WriteAudio(byte[] audioBytes)
      method StopAsync (line 390) | public async Task StopAsync()
      method StartAudioProcess (line 407) | private void StartAudioProcess()
      method Dispose (line 438) | public void Dispose()
    method StartAudioPlayer (line 452) | private static StreamingAudioPlayer StartAudioPlayer(bool usePcmFormat...

FILE: tts/tts-dotnet-quickstart/StreamingTtsService.cs
  class Queue (line 27) | internal class Queue<T>
    method Push (line 34) | public void Push(T x)
    method End (line 51) | public void End()
    method GetAsyncEnumerable (line 68) | public async IAsyncEnumerable<T> GetAsyncEnumerable()
  class StreamingTtsClient (line 115) | public class StreamingTtsClient : IDisposable
    method StreamingTtsClient (line 132) | public StreamingTtsClient(string apiKey, bool enableDebugLogging = false)
    method ConnectAsync (line 143) | public async Task ConnectAsync()
    method SendAsync (line 208) | public async Task SendAsync(object message)
    method SendFlushAsync (line 216) | public async Task SendFlushAsync()
    method SendCloseAsync (line 222) | public async Task SendCloseAsync()
    method ReceiveAudioChunksAsync (line 228) | public async IAsyncEnumerable<SnippetAudioChunk> ReceiveAudioChunksAsy...
    method LogDebug (line 258) | private void LogDebug(string message)
    method Dispose (line 266) | public void Dispose()

FILE: tts/tts-dotnet-quickstart/TtsTests.cs
  class TtsTestFixture (line 14) | public class TtsTestFixture : IAsyncLifetime
    method InitializeAsync (line 19) | public Task InitializeAsync()
    method DisposeAsync (line 39) | public Task DisposeAsync()
  class TtsJsonStreamTests (line 45) | [Collection("TtsTests")]
    method TtsJsonStreamTests (line 50) | public TtsJsonStreamTests(TtsTestFixture fixture)
    method TestFixture_HasApiKey (line 55) | [Fact(DisplayName = "test fixture has API key")]
    method GeneratesJsonStream_WithOctave1 (line 62) | [Fact(DisplayName = "connects w/ API key, generates JSON stream w/ Oct...
    method GeneratesJsonStream_WithOctave2AndTimestamps (line 88) | [Fact(DisplayName = "connects w/ API key, generates JSON stream w/ Oct...
  class TtsStreamInputTests (line 144) | [Collection("TtsTests")]
    method TtsStreamInputTests (line 149) | public TtsStreamInputTests(TtsTestFixture fixture)
    method CreatesStreamAndConnectsSuccessfully (line 154) | [Fact(DisplayName = "StreamingTtsClient: creates a bidirectional strea...
    method SendsMessagesAndReceivesAudioChunks (line 170) | [Fact(DisplayName = "StreamingTtsClient: sends messages and receives a...
  class TtsTestCollection (line 208) | [CollectionDefinition("TtsTests")]

FILE: tts/tts-next-js-agora/app/api/generate-agora-token/route.ts
  constant APP_ID (line 4) | const APP_ID =
  constant APP_CERTIFICATE (line 9) | const APP_CERTIFICATE =
  constant EXPIRATION_SECONDS (line 13) | const EXPIRATION_SECONDS = Number(
  function generateChannelName (line 19) | function generateChannelName (): string
  function GET (line 26) | async function GET ( request: NextRequest )

FILE: tts/tts-next-js-agora/app/api/invite-agent/route.ts
  constant DEFAULT_BASE_URL (line 10) | const DEFAULT_BASE_URL =
  constant AGORA_APP_ID (line 13) | const AGORA_APP_ID =
  constant AGORA_BASE_URL (line 18) | const AGORA_BASE_URL =
  constant AGORA_CUSTOMER_ID (line 23) | const AGORA_CUSTOMER_ID =
  constant AGORA_CUSTOMER_SECRET (line 25) | const AGORA_CUSTOMER_SECRET =
  constant AGORA_AGENT_UID (line 29) | const AGORA_AGENT_UID =
  constant HUME_API_KEY (line 32) | const HUME_API_KEY =
  constant HUME_VOICE_ID (line 34) | const HUME_VOICE_ID =
  constant OPENAI_API_KEY (line 37) | const OPENAI_API_KEY =
  constant OPENAI_MODEL (line 39) | const OPENAI_MODEL =
  function validateEnvironment (line 42) | function validateEnvironment ()
  function createAuthorizationHeader (line 61) | function createAuthorizationHeader (): string
  function getConfig (line 67) | function getConfig ()
  function POST (line 95) | async function POST ( request: Request )

FILE: tts/tts-next-js-agora/app/api/stop-conversation/route.ts
  constant AGORA_APP_ID (line 3) | const AGORA_APP_ID =
  constant AGORA_BASE_URL (line 8) | const AGORA_BASE_URL =
  constant AGORA_CUSTOMER_ID (line 13) | const AGORA_CUSTOMER_ID =
  constant AGORA_CUSTOMER_SECRET (line 15) | const AGORA_CUSTOMER_SECRET =
  type StopRequest (line 20) | type StopRequest = {
  function POST (line 24) | async function POST ( request: Request )

FILE: tts/tts-next-js-agora/app/layout.tsx
  function RootLayout (line 12) | function RootLayout({

FILE: tts/tts-next-js-agora/app/page.tsx
  function Page (line 35) | function Page() {

FILE: tts/tts-next-js-agora/components/AudioVisualizer.tsx
  type AudioVisualizerProps (line 6) | interface AudioVisualizerProps {
  function AudioVisualizer (line 10) | function AudioVisualizer({ track }: AudioVisualizerProps) {

FILE: tts/tts-next-js-agora/components/ConversationComponent.tsx
  function ConversationComponent (line 25) | function ConversationComponent({

FILE: tts/tts-next-js-agora/components/ConvoTextStream.tsx
  type Props (line 7) | interface Props {
  function ConvoTextStream (line 13) | function ConvoTextStream({

FILE: tts/tts-next-js-agora/components/MicrophoneButton.tsx
  type MicrophoneButtonProps (line 7) | interface MicrophoneButtonProps {
  function MicrophoneButton (line 13) | function MicrophoneButton({

FILE: tts/tts-next-js-agora/lib/message.ts
  constant CONSOLE_LOG_PREFIX (line 3) | const CONSOLE_LOG_PREFIX = '[MessageService]';
  constant DEFAULT_MESSAGE_CACHE_TIMEOUT (line 4) | const DEFAULT_MESSAGE_CACHE_TIMEOUT = 1000 * 60 * 5;
  type TDataChunk (line 12) | type TDataChunk = {
  type EMessageStatus (line 19) | enum EMessageStatus
  type ETranscriptionObjectType (line 26) | enum ETranscriptionObjectType
  type IMessageListItem (line 33) | interface IMessageListItem
  type IMessageArrayItem (line 41) | interface IMessageArrayItem<T>
  type ITranscriptionBase (line 51) | interface ITranscriptionBase
  type IUserTranscription (line 64) | interface IUserTranscription extends ITranscriptionBase
  type IAgentTranscription (line 70) | interface IAgentTranscription extends ITranscriptionBase
  type IMessageInterrupt (line 78) | interface IMessageInterrupt
  class MessageEngine (line 92) | class MessageEngine
    method constructor (line 106) | constructor (
    method _listenRtcEvents (line 117) | private _listenRtcEvents ()
    method handleStreamMessage (line 127) | public handleStreamMessage ( stream: Uint8Array )
    method handleMessage (line 140) | public handleMessage (
    method handleTextMessage (line 170) | private handleTextMessage ( message: IUserTranscription )
    method handleMessageInterrupt (line 200) | private handleMessageInterrupt ( message: IMessageInterrupt )
    method _appendChatHistory (line 212) | private _appendChatHistory (
    method _mutateChatHistory (line 220) | private _mutateChatHistory ()
    method handleChunk (line 239) | public handleChunk<T> (
    method streamMessage2Chunk (line 311) | public streamMessage2Chunk ( stream: Uint8Array ): string
    method cleanup (line 316) | public cleanup ()

FILE: tts/tts-next-js-agora/types/agora-rtc-react.d.ts
  type UID (line 4) | type UID = number | string;
  type IRemoteAudioTrack (line 6) | interface IRemoteAudioTrack
  type ILocalAudioTrack (line 11) | interface ILocalAudioTrack extends IRemoteAudioTrack
  type IMicrophoneAudioTrack (line 17) | interface IMicrophoneAudioTrack extends ILocalAudioTrack { }
  type IAgoraRTCClient (line 19) | interface IAgoraRTCClient
  type AgoraRTCProviderProps (line 59) | interface AgoraRTCProviderProps

FILE: tts/tts-next-js-agora/types/conversation.ts
  type AgoraTokenData (line 3) | interface AgoraTokenData
  type ClientStartRequest (line 12) | interface ClientStartRequest
  type StopConversationRequest (line 22) | interface StopConversationRequest
  type ConversationComponentProps (line 27) | interface ConversationComponentProps
  type AgentResponse (line 34) | interface AgentResponse
  type HumeTTSParams (line 41) | interface HumeTTSParams
  type AgoraStartRequest (line 51) | interface AgoraStartRequest
  type TTSConfig (line 95) | interface TTSConfig
  type TokenRenewalHandler (line 101) | interface TokenRenewalHandler

FILE: tts/tts-next-js-chat/src/app/api/chat/route.ts
  function POST (line 4) | async function POST(req: Request) {
  constant SYSTEM_PROMPT (line 25) | const SYSTEM_PROMPT = `

FILE: tts/tts-next-js-chat/src/app/api/transcribe/route.ts
  function POST (line 3) | async function POST(req: Request) {

FILE: tts/tts-next-js-chat/src/app/api/tts/route.ts
  function POST (line 10) | async function POST(req: NextRequest) {

FILE: tts/tts-next-js-chat/src/app/api/voices/route.ts
  function GET (line 5) | async function GET(req: NextRequest) {

FILE: tts/tts-next-js-chat/src/app/components/AudioPlayer.tsx
  function AudioPlayer (line 4) | function AudioPlayer({ chunks }: { chunks: Uint8Array[] }) {

FILE: tts/tts-next-js-chat/src/app/components/Chat.tsx
  function Chat (line 14) | function Chat() {

FILE: tts/tts-next-js-chat/src/app/components/ControlsPanel.tsx
  function ControlsPanel (line 8) | function ControlsPanel() {

FILE: tts/tts-next-js-chat/src/app/components/VoiceSelector.tsx
  type Props (line 4) | interface Props {
  function VoiceSelector (line 10) | function VoiceSelector({

FILE: tts/tts-next-js-chat/src/app/components/logos/Hume.tsx
  type HumeLogoProps (line 4) | type HumeLogoProps = SVGAttributes<SVGSVGElement>;
  function HumeLogo (line 6) | function HumeLogo(props: HumeLogoProps) {

FILE: tts/tts-next-js-chat/src/app/context/VoiceSettingsContext.tsx
  type VoiceSettings (line 5) | interface VoiceSettings {
  function VoiceSettingsProvider (line 15) | function VoiceSettingsProvider({ children }: { children: ReactNode }) {
  function useVoiceSettings (line 35) | function useVoiceSettings() {

FILE: tts/tts-next-js-chat/src/app/hooks/useRecording.ts
  function useRecording (line 3) | function useRecording(onTranscribed: (text: string) => void) {

FILE: tts/tts-next-js-chat/src/app/hooks/useTts.ts
  type AudioChunks (line 5) | type AudioChunks = Record<string, Uint8Array[]>;
  function useTts (line 7) | function useTts(options: {

FILE: tts/tts-next-js-chat/src/app/hooks/useVoices.ts
  function useVoices (line 4) | function useVoices(provider: VoiceProvider) {

FILE: tts/tts-next-js-chat/src/app/layout.tsx
  function RootLayout (line 21) | function RootLayout({

FILE: tts/tts-next-js-chat/src/app/page.tsx
  function Home (line 7) | function Home() {

FILE: tts/tts-next-js-vercel-ai-sdk/src/actions/generate-speech.ts
  function tts (line 10) | async function tts(formData: FormData): Promise<{

FILE: tts/tts-next-js-vercel-ai-sdk/src/actions/list-voices.ts
  function listVoices (line 9) | async function listVoices(): Promise<Hume.tts.ReturnVoice[]> {

FILE: tts/tts-next-js-vercel-ai-sdk/src/app/layout.tsx
  function RootLayout (line 21) | function RootLayout({

FILE: tts/tts-next-js-vercel-ai-sdk/src/app/page.tsx
  constant DEFAULT_VOICE_ID (line 11) | const DEFAULT_VOICE_ID = "9e068547-5ba4-4c8e-8e03-69282a008f04";
  function Page (line 13) | function Page() {

FILE: tts/tts-next-js-vercel-ai-sdk/src/components/AudioClipCard.tsx
  type ClipCardProps (line 5) | interface ClipCardProps {
  function AudioClipCard (line 12) | function AudioClipCard({ voiceName, text, instructions, url }: ClipCardP...

FILE: tts/tts-next-js-vercel-ai-sdk/src/components/AudioGallery.tsx
  type AudioGalleryProps (line 8) | interface AudioGalleryProps {
  function AudioGallery (line 13) | function AudioGallery({ clips, voices }: AudioGalleryProps) {

FILE: tts/tts-next-js-vercel-ai-sdk/src/components/TextAreaField.tsx
  type TextAreaFieldProps (line 5) | interface TextAreaFieldProps {
  function TextAreaField (line 15) | function TextAreaField({

FILE: tts/tts-next-js-vercel-ai-sdk/src/components/TtsForm.tsx
  type TtsFormProps (line 8) | interface TtsFormProps {
  function TtsForm (line 16) | function TtsForm({

FILE: tts/tts-next-js-vercel-ai-sdk/src/components/VoiceSelect.tsx
  type VoiceSelectProps (line 6) | interface VoiceSelectProps {
  function VoiceSelect (line 12) | function VoiceSelect({ voices, selectedVoiceId, onChange }: VoiceSelectP...

FILE: tts/tts-next-js-vercel-ai-sdk/src/components/logos/Hume.tsx
  type HumeLogoProps (line 4) | type HumeLogoProps = SVGAttributes<SVGSVGElement>;
  function HumeLogo (line 6) | function HumeLogo(props: HumeLogoProps) {

FILE: tts/tts-next-js-vercel-ai-sdk/src/hooks/useVoices.ts
  function useVoices (line 5) | function useVoices(defaultId: string) {

FILE: tts/tts-next-js-vercel-ai-sdk/src/types/clip.ts
  type Clip (line 1) | interface Clip {

FILE: tts/tts-python-livekit/src/agent_session/main.py
  class VoiceAssistant (line 18) | class VoiceAssistant(Agent):
    method __init__ (line 23) | def __init__(self):
  function entrypoint (line 27) | async def entrypoint(ctx: JobContext) -> None:

FILE: tts/tts-python-livekit/src/standalone_tts/main.py
  function synthesize_text (line 14) | async def synthesize_text(text: str, session: ClientSession) -> bytes:
  function interactive_repl (line 37) | async def interactive_repl() -> None:

FILE: tts/tts-python-livekit/src/utils.py
  function validate_env_vars (line 9) | def validate_env_vars(env_vars: list[str]) -> None:

FILE: tts/tts-python-quickstart/app.py
  function example1 (line 45) | async def example1():
  function example2 (line 60) | async def example2():
  function example3 (line 135) | async def example3():
  function main (line 164) | async def main():

FILE: tts/tts-python-quickstart/conftest.py
  function pytest_collection_modifyitems (line 11) | def pytest_collection_modifyitems(config, items):

FILE: tts/tts-python-quickstart/test_app.py
  function create_audio_collector (line 22) | def create_audio_collector():
  function assert_valid_audio_bytes (line 33) | def assert_valid_audio_bytes(chunks: list, *, min_chunks: int = 1):
  function assert_valid_audio_chunk (line 40) | def assert_valid_audio_chunk(chunk):
  function test_example1_runs_successfully (line 52) | async def test_example1_runs_successfully():
  function test_example2_runs_successfully (line 67) | async def test_example2_runs_successfully(hume_client):
  function test_example3_runs_successfully (line 94) | async def test_example3_runs_successfully():
  function api_key (line 119) | def api_key():
  function hume_client (line 127) | def hume_client(api_key):
  function test_generates_json_with_octave_1 (line 132) | async def test_generates_json_with_octave_1(hume_client):
  function test_generates_json_with_octave_2_with_timestamps (line 151) | async def test_generates_json_with_octave_2_with_timestamps(hume_client):
  function test_creates_stream_and_connects_successfully (line 190) | async def test_creates_stream_and_connects_successfully(hume_client):
  function test_sends_messages_and_receives_audio_chunks (line 201) | async def test_sends_messages_and_receives_audio_chunks(hume_client):

FILE: tts/tts-swift-quickstart/access_token_service/run_token_service.py
  function get_access_token (line 10) | def get_access_token():

FILE: tts/tts-typescript-lipsync/index.tsx
  function synthesize (line 28) | async function synthesize() {

FILE: tts/tts-typescript-lipsync/mouth.ts
  type Point2D (line 1) | type Point2D = [number, number];
  type MouthShape (line 2) | type MouthShape = Point2D[];
  type PhonemeEvent (line 4) | type PhonemeEvent = {
  function mirror (line 11) | function mirror(leftSide: [number, number][]): MouthShape {
  type Viseme (line 31) | type Viseme =
  constant VISEME_SHAPES (line 48) | const VISEME_SHAPES: Record<Viseme, MouthShape> = {
  constant PHONEME_MAP (line 96) | const PHONEME_MAP: Array<[Viseme, string[]]> = [
  function phonemeToViseme (line 114) | function phonemeToViseme(phoneme: string): Viseme {
  class Mouth (line 128) | class Mouth {
    method addPhoneme (line 134) | addPhoneme(phoneme: string, timestamp: number): void {
    method getShapeAt (line 149) | getShapeAt(time: number): MouthShape {
    method clearAfter (line 180) | clearAfter(timestamp: number): void {
    method reset (line 188) | reset(): void {
  function drawMouth (line 202) | function drawMouth(ctx: CanvasRenderingContext2D, shape: MouthShape, wid...
  class MouthAnimation (line 234) | class MouthAnimation {
    method constructor (line 243) | constructor(mouth: Mouth, width: number = 400, height: number = 300) {
    method start (line 259) | start(timestamp: number): void {
    method stop (line 265) | stop(): void {

FILE: tts/tts-typescript-quickstart/audio_player.ts
  constant SAMPLE_RATE (line 3) | const SAMPLE_RATE = 48000;
  function startAudioPlayer (line 12) | function startAudioPlayer(mode: 'raw' | 'container' = 'container') {

FILE: tts/tts-typescript-quickstart/index.test.ts
  function waitForStreamOpen (line 309) | function waitForStreamOpen(getStream: () => any): Promise<void> {

FILE: tts/tts-typescript-quickstart/vite-env.d.ts
  type ImportMetaEnv (line 3) | interface ImportMetaEnv {
  type ImportMeta (line 7) | interface ImportMeta {

FILE: tts/tts-unity-quickstart/Assets/Scripts/HumeSpeaker.cs
  class HumeSpeaker (line 9) | [RequireComponent(typeof(AudioSource))]
    method SetApiKey (line 16) | public void SetApiKey(string key)
    method Speak (line 21) | public async void Speak()
    method ConvertBase64ToAudioClip (line 61) | private AudioClip ConvertBase64ToAudioClip(string base64Audio)
    method ConvertS16LEToFloats (line 82) | private float[] ConvertS16LEToFloats(byte[] bytes)

FILE: tts/tts-unity-quickstart/Assets/Scripts/SceneBuilder.cs
  class SceneBuilder (line 3) | public class SceneBuilder : MonoBehaviour
    method Awake (line 8) | void Awake()
    method Start (line 13) | void Start()
    method BuildScene (line 18) | void BuildScene()
  class CubeSpinner (line 59) | public class CubeSpinner : MonoBehaviour
    method Start (line 65) | void Start()
    method Update (line 70) | void Update()
  class ClickToSpeak (line 83) | public class ClickToSpeak : MonoBehaviour
    method Start (line 87) | void Start()
    method OnMouseDown (line 92) | void OnMouseDown()
Copy disabled (too large) Download .json
Condensed preview — 678 files, each showing path, character count, and a content snippet. Download the .json file for the full structured content (11,836K chars).
[
  {
    "path": ".github/dependabot.yml",
    "chars": 2630,
    "preview": "version: 2\n\nupdates:\n  # check for updated versions of github actions on a weekly basis\n  - package-ecosystem: 'github-a"
  },
  {
    "path": ".github/workflows/dependabot-auto-merge.yml",
    "chars": 880,
    "preview": "name: Dependabot auto-merge\n\non:\n  pull_request_target:\n    types: [opened, reopened, ready_for_review, synchronize]\n\npe"
  },
  {
    "path": ".github/workflows/test-examples.yml",
    "chars": 34336,
    "preview": "name: test-examples\n\non:\n  pull_request:\n    types: [opened, synchronize, reopened]\n  push:\n    branches: [main, master]"
  },
  {
    "path": ".gitignore",
    "chars": 96,
    "preview": ".hume/\n__pycache__/\n.venv/\n.DS_Store\n.env\nnode_modules/\n.pnpm-store/\ndist/\n.vscode/\n.mypy_cache/"
  },
  {
    "path": "Directory.Packages.props",
    "chars": 938,
    "preview": "<Project>\n  <PropertyGroup>\n    <ManagePackageVersionsCentrally>true</ManagePackageVersionsCentrally>\n  </PropertyGroup>"
  },
  {
    "path": "LICENSE",
    "chars": 1064,
    "preview": "MIT License\n\nCopyright (c) 2023 Hume AI\n\nPermission is hereby granted, free of charge, to any person obtaining a copy\nof"
  },
  {
    "path": "README.md",
    "chars": 8228,
    "preview": "<div align=\"center\">\n  <img src=\"https://storage.googleapis.com/hume-public-logos/hume/hume-banner.png\">\n  <h1>Hume API "
  },
  {
    "path": "evi/evi-dotnet-quickstart/.gitignore",
    "chars": 143,
    "preview": "# Build outputs\n[Bb]in/\n[Oo]bj/\n\n# IDE\n.vs/\n.idea/\n*.user\n*.suo\n\n# macOS\n.DS_Store\n\n# Environment\n.env\n\n# Allow sample a"
  },
  {
    "path": "evi/evi-dotnet-quickstart/EviTests.cs",
    "chars": 8261,
    "preview": "// To run tests:\n// dotnet test evi-csharp-quickstart.tests.csproj --logger \"console;verbosity=detailed\"\n\nusing System;\n"
  },
  {
    "path": "evi/evi-dotnet-quickstart/Program.cs",
    "chars": 4666,
    "preview": "using System;\nusing System.IO;\nusing System.Linq;\nusing System.Threading.Tasks;\nusing DotNetEnv;\nusing Hume;\nusing Hume."
  },
  {
    "path": "evi/evi-dotnet-quickstart/README.md",
    "chars": 2226,
    "preview": "<div align=\"center\">\n  <img src=\"https://storage.googleapis.com/hume-public-logos/hume/hume-banner.png\">\n  <h1>EVI | C# "
  },
  {
    "path": "evi/evi-dotnet-quickstart/evi-csharp-quickstart.csproj",
    "chars": 447,
    "preview": "<Project Sdk=\"Microsoft.NET.Sdk\">\n\n  <PropertyGroup>\n    <OutputType>Exe</OutputType>\n    <TargetFramework>net9.0</Targe"
  },
  {
    "path": "evi/evi-dotnet-quickstart/evi-csharp-quickstart.tests.csproj",
    "chars": 1078,
    "preview": "<Project Sdk=\"Microsoft.NET.Sdk\">\n\n  <PropertyGroup>\n    <TargetFramework>net9.0</TargetFramework>\n    <RootNamespace>Ev"
  },
  {
    "path": "evi/evi-flutter/.gitignore",
    "chars": 781,
    "preview": "# Miscellaneous\n*.class\n*.log\n*.pyc\n*.swp\n.DS_Store\n.atom/\n.build/\n.buildlog/\n.history\n.svn/\n.swiftpm/\nmigrate_working_d"
  },
  {
    "path": "evi/evi-flutter/README.md",
    "chars": 3569,
    "preview": "<div align=\"center\">\n  <img src=\"https://storage.googleapis.com/hume-public-logos/hume/hume-banner.png\">\n  <h1>Empathic "
  },
  {
    "path": "evi/evi-flutter/analysis_options.yaml",
    "chars": 1420,
    "preview": "# This file configures the analyzer, which statically analyzes Dart code to\n# check for errors, warnings, and lints.\n#\n#"
  },
  {
    "path": "evi/evi-flutter/android/.gitignore",
    "chars": 247,
    "preview": "gradle-wrapper.jar\n/.gradle\n/captures/\n/gradlew\n/gradlew.bat\n/local.properties\nGeneratedPluginRegistrant.java\n\n# Remembe"
  },
  {
    "path": "evi/evi-flutter/android/app/build.gradle",
    "chars": 1351,
    "preview": "plugins {\n    id \"com.android.application\"\n    id \"kotlin-android\"\n    // The Flutter Gradle Plugin must be applied afte"
  },
  {
    "path": "evi/evi-flutter/android/app/src/debug/AndroidManifest.xml",
    "chars": 378,
    "preview": "<manifest xmlns:android=\"http://schemas.android.com/apk/res/android\">\n    <!-- The INTERNET permission is required for d"
  },
  {
    "path": "evi/evi-flutter/android/app/src/main/AndroidManifest.xml",
    "chars": 2199,
    "preview": "<manifest xmlns:android=\"http://schemas.android.com/apk/res/android\">\n    <application\n        android:label=\"evi_exampl"
  },
  {
    "path": "evi/evi-flutter/android/app/src/main/kotlin/com/example/evi_example/MainActivity.kt",
    "chars": 124,
    "preview": "package com.example.evi_example\n\nimport io.flutter.embedding.android.FlutterActivity\n\nclass MainActivity: FlutterActivit"
  },
  {
    "path": "evi/evi-flutter/android/app/src/main/res/drawable/launch_background.xml",
    "chars": 434,
    "preview": "<?xml version=\"1.0\" encoding=\"utf-8\"?>\n<!-- Modify this file to customize your launch splash screen -->\n<layer-list xmln"
  },
  {
    "path": "evi/evi-flutter/android/app/src/main/res/drawable-v21/launch_background.xml",
    "chars": 438,
    "preview": "<?xml version=\"1.0\" encoding=\"utf-8\"?>\n<!-- Modify this file to customize your launch splash screen -->\n<layer-list xmln"
  },
  {
    "path": "evi/evi-flutter/android/app/src/main/res/values/styles.xml",
    "chars": 996,
    "preview": "<?xml version=\"1.0\" encoding=\"utf-8\"?>\n<resources>\n    <!-- Theme applied to the Android Window while the process is sta"
  },
  {
    "path": "evi/evi-flutter/android/app/src/main/res/values-night/styles.xml",
    "chars": 995,
    "preview": "<?xml version=\"1.0\" encoding=\"utf-8\"?>\n<resources>\n    <!-- Theme applied to the Android Window while the process is sta"
  },
  {
    "path": "evi/evi-flutter/android/app/src/profile/AndroidManifest.xml",
    "chars": 449,
    "preview": "<manifest xmlns:android=\"http://schemas.android.com/apk/res/android\">\n    <!-- The INTERNET permission is required for d"
  },
  {
    "path": "evi/evi-flutter/android/build.gradle",
    "chars": 322,
    "preview": "allprojects {\n    repositories {\n        google()\n        mavenCentral()\n    }\n}\n\nrootProject.buildDir = \"../build\"\nsubp"
  },
  {
    "path": "evi/evi-flutter/android/gradle/wrapper/gradle-wrapper.properties",
    "chars": 200,
    "preview": "distributionBase=GRADLE_USER_HOME\ndistributionPath=wrapper/dists\nzipStoreBase=GRADLE_USER_HOME\nzipStorePath=wrapper/dist"
  },
  {
    "path": "evi/evi-flutter/android/gradle.properties",
    "chars": 135,
    "preview": "org.gradle.jvmargs=-Xmx4G -XX:MaxMetaspaceSize=2G -XX:+HeapDumpOnOutOfMemoryError\nandroid.useAndroidX=true\nandroid.enabl"
  },
  {
    "path": "evi/evi-flutter/android/settings.gradle",
    "chars": 727,
    "preview": "pluginManagement {\n    def flutterSdkPath = {\n        def properties = new Properties()\n        file(\"local.properties\")"
  },
  {
    "path": "evi/evi-flutter/audio/.gitignore",
    "chars": 531,
    "preview": "# Miscellaneous\n*.class\n*.log\n*.pyc\n*.swp\n.DS_Store\n.atom/\n.buildlog/\n.history\n.svn/\nmigrate_working_dir/\n\n# IntelliJ re"
  },
  {
    "path": "evi/evi-flutter/audio/.metadata",
    "chars": 967,
    "preview": "# This file tracks properties of this Flutter project.\n# Used by Flutter tool to assess capabilities and perform upgrade"
  },
  {
    "path": "evi/evi-flutter/audio/ios/.gitignore",
    "chars": 419,
    "preview": ".idea/\n.vagrant/\n.sconsign.dblite\n.svn/\n\n.DS_Store\n*.swp\nprofile\n\nDerivedData/\nbuild/\nGeneratedPluginRegistrant.h\nGenera"
  },
  {
    "path": "evi/evi-flutter/audio/ios/Assets/.gitkeep",
    "chars": 0,
    "preview": ""
  },
  {
    "path": "evi/evi-flutter/audio/ios/Classes/AudioPlugin.swift",
    "chars": 4958,
    "preview": "import AVFoundation\nimport Flutter\nimport UIKit\n\npublic class AudioPlugin: NSObject, FlutterPlugin {\n    private lazy va"
  },
  {
    "path": "evi/evi-flutter/audio/ios/Classes/Microphone.swift",
    "chars": 4482,
    "preview": "import AVFoundation\nimport Foundation\n\npublic enum MicrophoneError: Error {\n    case conversionFailed(details: String)\n "
  },
  {
    "path": "evi/evi-flutter/audio/ios/Classes/SoundPlayer.swift",
    "chars": 3158,
    "preview": "import AVFoundation\nimport Foundation\n\npublic enum SoundPlayerError: Error {\n    case invalidBase64String\n    case could"
  },
  {
    "path": "evi/evi-flutter/audio/ios/Resources/PrivacyInfo.xcprivacy",
    "chars": 373,
    "preview": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<!DOCTYPE plist PUBLIC \"-//Apple//DTD PLIST 1.0//EN\" \"http://www.apple.com/DTDs/P"
  },
  {
    "path": "evi/evi-flutter/audio/ios/audio.podspec",
    "chars": 1266,
    "preview": "#\n# To learn more about a Podspec see http://guides.cocoapods.org/syntax/podspec.html.\n# Run `pod lib lint audio.podspec"
  },
  {
    "path": "evi/evi-flutter/audio/lib/audio.dart",
    "chars": 2753,
    "preview": "import 'dart:async';\nimport 'dart:convert';\nimport 'dart:io' show Platform;\nimport 'package:flutter/foundation.dart' sho"
  },
  {
    "path": "evi/evi-flutter/audio/lib/audio_method_channel.dart",
    "chars": 554,
    "preview": "import 'package:flutter/foundation.dart';\nimport 'package:flutter/services.dart';\n\nimport 'audio_platform_interface.dart"
  },
  {
    "path": "evi/evi-flutter/audio/lib/audio_platform_interface.dart",
    "chars": 921,
    "preview": "import 'package:plugin_platform_interface/plugin_platform_interface.dart';\n\nimport 'audio_method_channel.dart';\n\nabstrac"
  },
  {
    "path": "evi/evi-flutter/audio/lib/dart_audio.dart",
    "chars": 4666,
    "preview": "import 'dart:async';\nimport 'dart:convert';\n\nimport 'package:audioplayers/audioplayers.dart';\nimport 'package:record/rec"
  },
  {
    "path": "evi/evi-flutter/audio/pubspec.yaml",
    "chars": 2300,
    "preview": "name: audio\ndescription: \"A new Flutter plugin project.\"\nversion: 0.0.1\nhomepage:\n\nenvironment:\n  sdk: ^3.5.4\n  flutter:"
  },
  {
    "path": "evi/evi-flutter/audio/test/audio_method_channel_test.dart",
    "chars": 758,
    "preview": "import 'package:flutter/services.dart';\nimport 'package:flutter_test/flutter_test.dart';\nimport 'package:audio/audio_met"
  },
  {
    "path": "evi/evi-flutter/audio/test/audio_test.dart",
    "chars": 884,
    "preview": "import 'package:flutter_test/flutter_test.dart';\nimport 'package:audio/audio.dart';\nimport 'package:audio/audio_platform"
  },
  {
    "path": "evi/evi-flutter/ios/.gitignore",
    "chars": 569,
    "preview": "**/dgph\n*.mode1v3\n*.mode2v3\n*.moved-aside\n*.pbxuser\n*.perspectivev3\n**/*sync/\n.sconsign.dblite\n.tags*\n**/.vagrant/\n**/De"
  },
  {
    "path": "evi/evi-flutter/ios/Flutter/AppFrameworkInfo.plist",
    "chars": 774,
    "preview": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<!DOCTYPE plist PUBLIC \"-//Apple//DTD PLIST 1.0//EN\" \"http://www.apple.com/DTDs/P"
  },
  {
    "path": "evi/evi-flutter/ios/Flutter/Debug.xcconfig",
    "chars": 107,
    "preview": "#include? \"Pods/Target Support Files/Pods-Runner/Pods-Runner.debug.xcconfig\"\n#include \"Generated.xcconfig\"\n"
  },
  {
    "path": "evi/evi-flutter/ios/Flutter/Release.xcconfig",
    "chars": 109,
    "preview": "#include? \"Pods/Target Support Files/Pods-Runner/Pods-Runner.release.xcconfig\"\n#include \"Generated.xcconfig\"\n"
  },
  {
    "path": "evi/evi-flutter/ios/Podfile",
    "chars": 1414,
    "preview": "# Uncomment this line to define a global platform for your project\n# platform :ios, '12.0'\n\n# CocoaPods analytics sends "
  },
  {
    "path": "evi/evi-flutter/ios/Runner/AppDelegate.swift",
    "chars": 391,
    "preview": "import Flutter\nimport UIKit\n\n@main\n@objc class AppDelegate: FlutterAppDelegate {\n  override func application(\n    _ appl"
  },
  {
    "path": "evi/evi-flutter/ios/Runner/Assets.xcassets/AppIcon.appiconset/Contents.json",
    "chars": 2519,
    "preview": "{\n  \"images\" : [\n    {\n      \"size\" : \"20x20\",\n      \"idiom\" : \"iphone\",\n      \"filename\" : \"Icon-App-20x20@2x.png\",\n   "
  },
  {
    "path": "evi/evi-flutter/ios/Runner/Assets.xcassets/LaunchImage.imageset/Contents.json",
    "chars": 391,
    "preview": "{\n  \"images\" : [\n    {\n      \"idiom\" : \"universal\",\n      \"filename\" : \"LaunchImage.png\",\n      \"scale\" : \"1x\"\n    },\n  "
  },
  {
    "path": "evi/evi-flutter/ios/Runner/Assets.xcassets/LaunchImage.imageset/README.md",
    "chars": 336,
    "preview": "# Launch Screen Assets\n\nYou can customize the launch screen with your own desired assets by replacing the image files in"
  },
  {
    "path": "evi/evi-flutter/ios/Runner/Base.lproj/LaunchScreen.storyboard",
    "chars": 2377,
    "preview": "<?xml version=\"1.0\" encoding=\"UTF-8\" standalone=\"no\"?>\n<document type=\"com.apple.InterfaceBuilder3.CocoaTouch.Storyboard"
  },
  {
    "path": "evi/evi-flutter/ios/Runner/Base.lproj/Main.storyboard",
    "chars": 1810,
    "preview": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<document type=\"com.apple.InterfaceBuilder3.CocoaTouch.Storyboard.XIB\" version=\"3"
  },
  {
    "path": "evi/evi-flutter/ios/Runner/Info.plist",
    "chars": 1748,
    "preview": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<!DOCTYPE plist PUBLIC \"-//Apple//DTD PLIST 1.0//EN\" \"http://www.apple.com/DTDs/P"
  },
  {
    "path": "evi/evi-flutter/ios/Runner/Runner-Bridging-Header.h",
    "chars": 38,
    "preview": "#import \"GeneratedPluginRegistrant.h\"\n"
  },
  {
    "path": "evi/evi-flutter/ios/Runner.xcodeproj/project.pbxproj",
    "chars": 30689,
    "preview": "// !$*UTF8*$!\n{\n\tarchiveVersion = 1;\n\tclasses = {\n\t};\n\tobjectVersion = 54;\n\tobjects = {\n\n/* Begin PBXBuildFile section *"
  },
  {
    "path": "evi/evi-flutter/ios/Runner.xcodeproj/project.xcworkspace/contents.xcworkspacedata",
    "chars": 135,
    "preview": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<Workspace\n   version = \"1.0\">\n   <FileRef\n      location = \"self:\">\n   </FileRef"
  },
  {
    "path": "evi/evi-flutter/ios/Runner.xcodeproj/project.xcworkspace/xcshareddata/IDEWorkspaceChecks.plist",
    "chars": 238,
    "preview": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<!DOCTYPE plist PUBLIC \"-//Apple//DTD PLIST 1.0//EN\" \"http://www.apple.com/DTDs/P"
  },
  {
    "path": "evi/evi-flutter/ios/Runner.xcodeproj/project.xcworkspace/xcshareddata/WorkspaceSettings.xcsettings",
    "chars": 226,
    "preview": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<!DOCTYPE plist PUBLIC \"-//Apple//DTD PLIST 1.0//EN\" \"http://www.apple.com/DTDs/P"
  },
  {
    "path": "evi/evi-flutter/ios/Runner.xcodeproj/xcshareddata/xcschemes/Runner.xcscheme",
    "chars": 3647,
    "preview": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<Scheme\n   LastUpgradeVersion = \"1510\"\n   version = \"1.3\">\n   <BuildAction\n      "
  },
  {
    "path": "evi/evi-flutter/ios/Runner.xcworkspace/contents.xcworkspacedata",
    "chars": 224,
    "preview": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<Workspace\n   version = \"1.0\">\n   <FileRef\n      location = \"group:Runner.xcodepr"
  },
  {
    "path": "evi/evi-flutter/ios/Runner.xcworkspace/xcshareddata/IDEWorkspaceChecks.plist",
    "chars": 238,
    "preview": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<!DOCTYPE plist PUBLIC \"-//Apple//DTD PLIST 1.0//EN\" \"http://www.apple.com/DTDs/P"
  },
  {
    "path": "evi/evi-flutter/ios/Runner.xcworkspace/xcshareddata/WorkspaceSettings.xcsettings",
    "chars": 226,
    "preview": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<!DOCTYPE plist PUBLIC \"-//Apple//DTD PLIST 1.0//EN\" \"http://www.apple.com/DTDs/P"
  },
  {
    "path": "evi/evi-flutter/ios/RunnerTests/RunnerTests.swift",
    "chars": 285,
    "preview": "import Flutter\nimport UIKit\nimport XCTest\n\nclass RunnerTests: XCTestCase {\n\n  func testExample() {\n    // If you add cod"
  },
  {
    "path": "evi/evi-flutter/ios/build/ios/XCBuildData/PIFCache/workspace/WORKSPACE@v11_hash=(null)_subobjects=4483c0dac1d2a63621e8a5d74e580a19-json",
    "chars": 297,
    "preview": "{\"guid\":\"dc4b70c03e8043e50e38f2068887b1d4\",\"name\":\"Pods\",\"path\":\"/Users/twitchard/dev/hume-api-examples/evi-flutter-exam"
  },
  {
    "path": "evi/evi-flutter/lib/chat_card.dart",
    "chars": 2561,
    "preview": "import 'dart:core';\n\nimport 'package:flutter/material.dart';\nimport 'theme.dart';\n\nenum Role { user, assistant }\n\nclass "
  },
  {
    "path": "evi/evi-flutter/lib/evi_message.dart",
    "chars": 2995,
    "preview": "import 'dart:convert';\n\n// Represents an incoming message sent from the /v0/evi/chat websocket endpoint of\n// the Hume A"
  },
  {
    "path": "evi/evi-flutter/lib/main.dart",
    "chars": 10724,
    "preview": "import 'dart:convert';\n\nimport 'package:flutter/material.dart';\nimport 'package:web_socket_channel/web_socket_channel.da"
  },
  {
    "path": "evi/evi-flutter/lib/theme.dart",
    "chars": 563,
    "preview": "import 'package:flutter/material.dart';\n\n// From CSS variables on hume.ai\nconst Color white = Color.fromRGBO(255, 255, 2"
  },
  {
    "path": "evi/evi-flutter/pubspec.yaml",
    "chars": 4014,
    "preview": "name: evi_example\ndescription: \"A new Flutter project.\"\n# The following line prevents the package from being accidentall"
  },
  {
    "path": "evi/evi-flutter/test/widget_test.dart",
    "chars": 1062,
    "preview": "// This is a basic Flutter widget test.\n//\n// To perform an interaction with a widget in your test, use the WidgetTester"
  },
  {
    "path": "evi/evi-flutter/web/index.html",
    "chars": 1226,
    "preview": "<!DOCTYPE html>\n<html>\n<head>\n  <!--\n    If you are serving your web app in a path other than the root, change the\n    h"
  },
  {
    "path": "evi/evi-flutter/web/manifest.json",
    "chars": 918,
    "preview": "{\n    \"name\": \"evi_example\",\n    \"short_name\": \"evi_example\",\n    \"start_url\": \".\",\n    \"display\": \"standalone\",\n    \"ba"
  },
  {
    "path": "evi/evi-next-js-app-router-quickstart/.eslintrc.json",
    "chars": 40,
    "preview": "{\n  \"extends\": \"next/core-web-vitals\"\n}\n"
  },
  {
    "path": "evi/evi-next-js-app-router-quickstart/.gitignore",
    "chars": 431,
    "preview": "# See https://help.github.com/articles/ignoring-files/ for more about ignoring files.\n\n# dependencies\n/node_modules\n/.pn"
  },
  {
    "path": "evi/evi-next-js-app-router-quickstart/.prettierrc.json",
    "chars": 3,
    "preview": "{}\n"
  },
  {
    "path": "evi/evi-next-js-app-router-quickstart/README.md",
    "chars": 3134,
    "preview": "<div align=\"center\">\n  <img src=\"https://storage.googleapis.com/hume-public-logos/hume/hume-banner.png\">\n  <h1>Empathic "
  },
  {
    "path": "evi/evi-next-js-app-router-quickstart/app/actions/set-llm-key.ts",
    "chars": 403,
    "preview": "\"use server\";\n\nimport { HumeClient } from \"hume\";\n\nconst hume = new HumeClient({\n  apiKey: process.env.HUME_API_KEY!,\n})"
  },
  {
    "path": "evi/evi-next-js-app-router-quickstart/app/api-key/page.tsx",
    "chars": 422,
    "preview": "import ChatLoader from \"@/components/ChatLoader\";\n\nexport const dynamic = \"force-dynamic\";\nexport const revalidate = 0;\n"
  },
  {
    "path": "evi/evi-next-js-app-router-quickstart/app/error.tsx",
    "chars": 333,
    "preview": "\"use client\";\n\nexport default function Error() {\n  return (\n    <div className={\"absolute inset-0 grid place-content-cen"
  },
  {
    "path": "evi/evi-next-js-app-router-quickstart/app/globals.css",
    "chars": 2821,
    "preview": "@import \"tailwindcss\";\n\n/* Register theme tokens for Tailwind v4 so utilities like border-border, font-sans work */\n@the"
  },
  {
    "path": "evi/evi-next-js-app-router-quickstart/app/layout.tsx",
    "chars": 736,
    "preview": "import type { Metadata } from \"next\";\nimport { GeistSans } from \"geist/font/sans\";\nimport { GeistMono } from \"geist/font"
  },
  {
    "path": "evi/evi-next-js-app-router-quickstart/app/page.tsx",
    "chars": 704,
    "preview": "import { fetchAccessToken } from \"hume\";\nimport ChatLoader from \"@/components/ChatLoader\";\n\nexport const dynamic = \"forc"
  },
  {
    "path": "evi/evi-next-js-app-router-quickstart/app/session-settings/page.tsx",
    "chars": 788,
    "preview": "import { fetchAccessToken } from \"hume\";\nimport ChatLoader from \"@/components/ChatLoader\";\nimport { E2E_SESSION_SETTINGS"
  },
  {
    "path": "evi/evi-next-js-app-router-quickstart/components/Chat.tsx",
    "chars": 1740,
    "preview": "\"use client\";\n\nimport { VoiceProvider } from \"@humeai/voice-react\";\nimport Messages from \"./Messages\";\nimport Controls f"
  },
  {
    "path": "evi/evi-next-js-app-router-quickstart/components/ChatLoader.tsx",
    "chars": 593,
    "preview": "\"use client\";\n\nimport dynamic from \"next/dynamic\";\nimport type { Hume } from \"hume\";\n\nconst Chat = dynamic(() => import("
  },
  {
    "path": "evi/evi-next-js-app-router-quickstart/components/Controls.tsx",
    "chars": 2937,
    "preview": "\"use client\";\nimport { useMicFft, useVoice } from \"@humeai/voice-react\";\nimport { Button } from \"./ui/button\";\nimport { "
  },
  {
    "path": "evi/evi-next-js-app-router-quickstart/components/Expressions.tsx",
    "chars": 1898,
    "preview": "\"use client\";\nimport { Hume } from \"hume\";\nimport { expressionColors, isExpressionColor } from \"@/utils/expressionColors"
  },
  {
    "path": "evi/evi-next-js-app-router-quickstart/components/Messages.tsx",
    "chars": 2094,
    "preview": "\"use client\";\nimport { cn } from \"@/utils\";\nimport { useVoice } from \"@humeai/voice-react\";\nimport Expressions from \"./E"
  },
  {
    "path": "evi/evi-next-js-app-router-quickstart/components/MicFFT.tsx",
    "chars": 1174,
    "preview": "\"use client\";\n\nimport { cn } from \"@/utils\";\nimport { motion } from \"framer-motion\";\nimport { AutoSizer } from \"react-vi"
  },
  {
    "path": "evi/evi-next-js-app-router-quickstart/components/Nav.tsx",
    "chars": 1732,
    "preview": "\"use client\";\n\nimport { useLayoutEffect, useState } from \"react\";\nimport HumeLogo from \"./logos/Hume\";\nimport { Button }"
  },
  {
    "path": "evi/evi-next-js-app-router-quickstart/components/StartCall.tsx",
    "chars": 2213,
    "preview": "import { ConnectOptions, useVoice } from \"@humeai/voice-react\";\nimport { AnimatePresence, motion } from \"framer-motion\";"
  },
  {
    "path": "evi/evi-next-js-app-router-quickstart/components/logos/GitHub.tsx",
    "chars": 2227,
    "preview": "import * as React from \"react\";\nimport type { SVGProps } from \"react\";\nconst Github = (props: SVGProps<SVGSVGElement>) ="
  },
  {
    "path": "evi/evi-next-js-app-router-quickstart/components/logos/Hume.tsx",
    "chars": 4737,
    "preview": "import type { FC, SVGAttributes } from \"react\";\nimport { useId } from \"react\";\n\nexport type HumeLogoProps = SVGAttribute"
  },
  {
    "path": "evi/evi-next-js-app-router-quickstart/components/ui/button.tsx",
    "chars": 1844,
    "preview": "import * as React from \"react\";\nimport { Slot } from \"@radix-ui/react-slot\";\nimport { cva, type VariantProps } from \"cla"
  },
  {
    "path": "evi/evi-next-js-app-router-quickstart/components/ui/toggle.tsx",
    "chars": 1455,
    "preview": "\"use client\";\n\nimport * as React from \"react\";\nimport * as TogglePrimitive from \"@radix-ui/react-toggle\";\nimport { cva, "
  },
  {
    "path": "evi/evi-next-js-app-router-quickstart/components.json",
    "chars": 338,
    "preview": "{\n  \"$schema\": \"https://ui.shadcn.com/schema.json\",\n  \"style\": \"default\",\n  \"rsc\": true,\n  \"tsx\": true,\n  \"tailwind\": {\n"
  },
  {
    "path": "evi/evi-next-js-app-router-quickstart/package.json",
    "chars": 1188,
    "preview": "{\n  \"name\": \"hume-evi-next-js-app-router\",\n  \"version\": \"0.1.0\",\n  \"private\": true,\n  \"homepage\": \"https://github.com/hu"
  },
  {
    "path": "evi/evi-next-js-app-router-quickstart/playwright.config.ts",
    "chars": 1117,
    "preview": "import { defineConfig } from \"@playwright/test\";\nimport { config } from \"dotenv\";\nimport { resolve } from \"path\";\n\nconfi"
  },
  {
    "path": "evi/evi-next-js-app-router-quickstart/postcss.config.mjs",
    "chars": 146,
    "preview": "/** @type {import('postcss-load-config').Config} */\nconst config = {\n  plugins: {\n    \"@tailwindcss/postcss\": {},\n  },\n}"
  },
  {
    "path": "evi/evi-next-js-app-router-quickstart/tailwind.config.ts",
    "chars": 2415,
    "preview": "import type { Config } from \"tailwindcss\";\nimport defaultTheme from \"tailwindcss/defaultTheme\";\n\nconst config = {\n  dark"
  },
  {
    "path": "evi/evi-next-js-app-router-quickstart/test-results/.last-run.json",
    "chars": 45,
    "preview": "{\n  \"status\": \"passed\",\n  \"failedTests\": []\n}"
  },
  {
    "path": "evi/evi-next-js-app-router-quickstart/tests/voice-react.spec.ts",
    "chars": 8063,
    "preview": "import { test, expect, Page } from \"@playwright/test\";\nimport { HumeClient } from \"hume\";\nimport type { Hume } from \"hum"
  },
  {
    "path": "evi/evi-next-js-app-router-quickstart/tsconfig.json",
    "chars": 698,
    "preview": "{\n  \"compilerOptions\": {\n    \"lib\": [\n      \"dom\",\n      \"dom.iterable\",\n      \"esnext\"\n    ],\n    \"allowJs\": true,\n    "
  },
  {
    "path": "evi/evi-next-js-app-router-quickstart/utils/e2e-hooks.ts",
    "chars": 773,
    "preview": "const ENABLED =\n  process.env.NEXT_PUBLIC_ENABLE_E2E_HOOKS &&\n  process.env.NEXT_PUBLIC_ENABLE_E2E_HOOKS !== \"false\";\n\nd"
  },
  {
    "path": "evi/evi-next-js-app-router-quickstart/utils/expressionColors.ts",
    "chars": 1491,
    "preview": "export const expressionColors = {\n  admiration: \"#ffc58f\",\n  adoration: \"#ffc6cc\",\n  aestheticAppreciation: \"#e2cbff\",\n "
  },
  {
    "path": "evi/evi-next-js-app-router-quickstart/utils/index.ts",
    "chars": 169,
    "preview": "import { type ClassValue, clsx } from \"clsx\";\nimport { twMerge } from \"tailwind-merge\";\n\nexport function cn(...inputs: C"
  },
  {
    "path": "evi/evi-next-js-app-router-quickstart/utils/session-settings.ts",
    "chars": 582,
    "preview": "import type { Hume } from \"hume\";\n\nexport const E2E_SESSION_SETTINGS = {\n  systemPrompt: \"You are a helpful assistant\",\n"
  },
  {
    "path": "evi/evi-next-js-function-calling/.eslintrc.json",
    "chars": 40,
    "preview": "{\n  \"extends\": \"next/core-web-vitals\"\n}\n"
  },
  {
    "path": "evi/evi-next-js-function-calling/.gitignore",
    "chars": 396,
    "preview": "# See https://help.github.com/articles/ignoring-files/ for more about ignoring files.\n\n# dependencies\n/node_modules\n/.pn"
  },
  {
    "path": "evi/evi-next-js-function-calling/.prettierrc.json",
    "chars": 3,
    "preview": "{}\n"
  },
  {
    "path": "evi/evi-next-js-function-calling/README.md",
    "chars": 5751,
    "preview": "<div align=\"center\">\n  <img src=\"https://storage.googleapis.com/hume-public-logos/hume/hume-banner.png\">\n  <h1>Empathic "
  },
  {
    "path": "evi/evi-next-js-function-calling/app/api/fetchWeather/route.ts",
    "chars": 561,
    "preview": "import { NextResponse } from \"next/server\";\nimport { fetchWeather } from \"@/utils/fetchWeather\";\n\nexport async function "
  },
  {
    "path": "evi/evi-next-js-function-calling/app/error.tsx",
    "chars": 333,
    "preview": "\"use client\";\n\nexport default function Error() {\n  return (\n    <div className={\"absolute inset-0 grid place-content-cen"
  },
  {
    "path": "evi/evi-next-js-function-calling/app/globals.css",
    "chars": 2821,
    "preview": "@import \"tailwindcss\";\n\n/* Register theme tokens for Tailwind v4 so utilities like border-border, font-sans work */\n@the"
  },
  {
    "path": "evi/evi-next-js-function-calling/app/layout.tsx",
    "chars": 736,
    "preview": "import type { Metadata } from \"next\";\nimport { GeistSans } from \"geist/font/sans\";\nimport { GeistMono } from \"geist/font"
  },
  {
    "path": "evi/evi-next-js-function-calling/app/page.tsx",
    "chars": 472,
    "preview": "import { fetchAccessToken } from \"hume\";\nimport ChatLoader from \"@/components/ChatLoader\";\n\nexport const dynamic = \"forc"
  },
  {
    "path": "evi/evi-next-js-function-calling/components/Chat.tsx",
    "chars": 2341,
    "preview": "\"use client\";\n\nimport { VoiceProvider, ToolCallHandler } from \"@humeai/voice-react\";\nimport Messages from \"./Messages\";\n"
  },
  {
    "path": "evi/evi-next-js-function-calling/components/ChatLoader.tsx",
    "chars": 255,
    "preview": "\"use client\";\n\nimport dynamic from \"next/dynamic\";\n\nconst Chat = dynamic(() => import(\"@/components/Chat\"), {\n  ssr: fal"
  },
  {
    "path": "evi/evi-next-js-function-calling/components/Controls.tsx",
    "chars": 2257,
    "preview": "\"use client\";\nimport { useMicFft, useVoice } from \"@humeai/voice-react\";\nimport { Button } from \"./ui/button\";\nimport { "
  },
  {
    "path": "evi/evi-next-js-function-calling/components/Expressions.tsx",
    "chars": 1898,
    "preview": "\"use client\";\nimport { Hume } from \"hume\";\nimport { expressionColors, isExpressionColor } from \"@/utils/expressionColors"
  },
  {
    "path": "evi/evi-next-js-function-calling/components/Messages.tsx",
    "chars": 2094,
    "preview": "\"use client\";\nimport { cn } from \"@/utils\";\nimport { useVoice } from \"@humeai/voice-react\";\nimport Expressions from \"./E"
  },
  {
    "path": "evi/evi-next-js-function-calling/components/MicFFT.tsx",
    "chars": 1174,
    "preview": "\"use client\";\n\nimport { cn } from \"@/utils\";\nimport { motion } from \"framer-motion\";\nimport { AutoSizer } from \"react-vi"
  },
  {
    "path": "evi/evi-next-js-function-calling/components/Nav.tsx",
    "chars": 1732,
    "preview": "\"use client\";\n\nimport { useLayoutEffect, useState } from \"react\";\nimport HumeLogo from \"./logos/Hume\";\nimport { Button }"
  },
  {
    "path": "evi/evi-next-js-function-calling/components/StartCall.tsx",
    "chars": 1863,
    "preview": "import { ConnectOptions, useVoice } from \"@humeai/voice-react\";\nimport { AnimatePresence, motion } from \"framer-motion\";"
  },
  {
    "path": "evi/evi-next-js-function-calling/components/logos/GitHub.tsx",
    "chars": 2227,
    "preview": "import * as React from \"react\";\nimport type { SVGProps } from \"react\";\nconst Github = (props: SVGProps<SVGSVGElement>) ="
  },
  {
    "path": "evi/evi-next-js-function-calling/components/logos/Hume.tsx",
    "chars": 4737,
    "preview": "import type { FC, SVGAttributes } from \"react\";\nimport { useId } from \"react\";\n\nexport type HumeLogoProps = SVGAttribute"
  },
  {
    "path": "evi/evi-next-js-function-calling/components/ui/button.tsx",
    "chars": 1844,
    "preview": "import * as React from \"react\";\nimport { Slot } from \"@radix-ui/react-slot\";\nimport { cva, type VariantProps } from \"cla"
  },
  {
    "path": "evi/evi-next-js-function-calling/components/ui/toggle.tsx",
    "chars": 1455,
    "preview": "\"use client\";\n\nimport * as React from \"react\";\nimport * as TogglePrimitive from \"@radix-ui/react-toggle\";\nimport { cva, "
  },
  {
    "path": "evi/evi-next-js-function-calling/components.json",
    "chars": 338,
    "preview": "{\n  \"$schema\": \"https://ui.shadcn.com/schema.json\",\n  \"style\": \"default\",\n  \"rsc\": true,\n  \"tsx\": true,\n  \"tailwind\": {\n"
  },
  {
    "path": "evi/evi-next-js-function-calling/package.json",
    "chars": 1135,
    "preview": "{\n  \"name\": \"evi-next-js-function-calling\",\n  \"version\": \"0.1.0\",\n  \"private\": true,\n  \"homepage\": \"https://github.com/h"
  },
  {
    "path": "evi/evi-next-js-function-calling/postcss.config.mjs",
    "chars": 146,
    "preview": "/** @type {import('postcss-load-config').Config} */\nconst config = {\n  plugins: {\n    \"@tailwindcss/postcss\": {},\n  },\n}"
  },
  {
    "path": "evi/evi-next-js-function-calling/tailwind.config.ts",
    "chars": 2406,
    "preview": "import type { Config } from \"tailwindcss\";\nimport defaultTheme from \"tailwindcss/defaultTheme\";\n\nconst config = {\n  dark"
  },
  {
    "path": "evi/evi-next-js-function-calling/tsconfig.json",
    "chars": 698,
    "preview": "{\n  \"compilerOptions\": {\n    \"lib\": [\n      \"dom\",\n      \"dom.iterable\",\n      \"esnext\"\n    ],\n    \"allowJs\": true,\n    "
  },
  {
    "path": "evi/evi-next-js-function-calling/utils/expressionColors.ts",
    "chars": 1491,
    "preview": "export const expressionColors = {\n  admiration: \"#ffc58f\",\n  adoration: \"#ffc6cc\",\n  aestheticAppreciation: \"#e2cbff\",\n "
  },
  {
    "path": "evi/evi-next-js-function-calling/utils/fetchWeather.ts",
    "chars": 2105,
    "preview": "import \"server-only\";\n\n/**\n * Function which consumes the geocode and weather APIs to get the current weather in a speci"
  },
  {
    "path": "evi/evi-next-js-function-calling/utils/index.ts",
    "chars": 169,
    "preview": "import { type ClassValue, clsx } from \"clsx\";\nimport { twMerge } from \"tailwind-merge\";\n\nexport function cn(...inputs: C"
  },
  {
    "path": "evi/evi-next-js-pages-router-quickstart/.eslintrc.json",
    "chars": 40,
    "preview": "{\n  \"extends\": \"next/core-web-vitals\"\n}\n"
  },
  {
    "path": "evi/evi-next-js-pages-router-quickstart/.gitignore",
    "chars": 396,
    "preview": "# See https://help.github.com/articles/ignoring-files/ for more about ignoring files.\n\n# dependencies\n/node_modules\n/.pn"
  },
  {
    "path": "evi/evi-next-js-pages-router-quickstart/.prettierrc.json",
    "chars": 3,
    "preview": "{}\n"
  },
  {
    "path": "evi/evi-next-js-pages-router-quickstart/README.md",
    "chars": 3163,
    "preview": "<div align=\"center\">\n  <img src=\"https://storage.googleapis.com/hume-public-logos/hume/hume-banner.png\">\n  <h1>Empathic "
  },
  {
    "path": "evi/evi-next-js-pages-router-quickstart/components/Chat.tsx",
    "chars": 1601,
    "preview": "\"use client\";\n\nimport { VoiceProvider } from \"@humeai/voice-react\";\nimport Messages from \"./Messages\";\nimport Controls f"
  },
  {
    "path": "evi/evi-next-js-pages-router-quickstart/components/Controls.tsx",
    "chars": 2257,
    "preview": "\"use client\";\nimport { useMicFft, useVoice } from \"@humeai/voice-react\";\nimport { Button } from \"./ui/button\";\nimport { "
  },
  {
    "path": "evi/evi-next-js-pages-router-quickstart/components/Expressions.tsx",
    "chars": 2070,
    "preview": "\"use client\";\nimport { Hume } from \"hume\";\nimport { expressionColors, isExpressionColor } from \"@/utils/expressionColors"
  },
  {
    "path": "evi/evi-next-js-pages-router-quickstart/components/Messages.tsx",
    "chars": 2094,
    "preview": "\"use client\";\nimport { cn } from \"@/utils\";\nimport { useVoice } from \"@humeai/voice-react\";\nimport Expressions from \"./E"
  },
  {
    "path": "evi/evi-next-js-pages-router-quickstart/components/MicFFT.tsx",
    "chars": 1174,
    "preview": "\"use client\";\n\nimport { cn } from \"@/utils\";\nimport { motion } from \"framer-motion\";\nimport { AutoSizer } from \"react-vi"
  },
  {
    "path": "evi/evi-next-js-pages-router-quickstart/components/Nav.tsx",
    "chars": 1720,
    "preview": "\"use client\";\n\nimport { useEffect, useState } from \"react\";\nimport HumeLogo from \"./logos/Hume\";\nimport { Button } from "
  },
  {
    "path": "evi/evi-next-js-pages-router-quickstart/components/StartCall.tsx",
    "chars": 1845,
    "preview": "import { ConnectOptions, useVoice } from \"@humeai/voice-react\";\nimport { AnimatePresence, motion } from \"framer-motion\";"
  },
  {
    "path": "evi/evi-next-js-pages-router-quickstart/components/logos/GitHub.tsx",
    "chars": 2227,
    "preview": "import * as React from \"react\";\nimport type { SVGProps } from \"react\";\nconst Github = (props: SVGProps<SVGSVGElement>) ="
  },
  {
    "path": "evi/evi-next-js-pages-router-quickstart/components/logos/Hume.tsx",
    "chars": 4737,
    "preview": "import type { FC, SVGAttributes } from \"react\";\nimport { useId } from \"react\";\n\nexport type HumeLogoProps = SVGAttribute"
  },
  {
    "path": "evi/evi-next-js-pages-router-quickstart/components/ui/button.tsx",
    "chars": 1844,
    "preview": "import * as React from \"react\";\nimport { Slot } from \"@radix-ui/react-slot\";\nimport { cva, type VariantProps } from \"cla"
  },
  {
    "path": "evi/evi-next-js-pages-router-quickstart/components/ui/toggle.tsx",
    "chars": 1455,
    "preview": "\"use client\";\n\nimport * as React from \"react\";\nimport * as TogglePrimitive from \"@radix-ui/react-toggle\";\nimport { cva, "
  },
  {
    "path": "evi/evi-next-js-pages-router-quickstart/components.json",
    "chars": 338,
    "preview": "{\n  \"$schema\": \"https://ui.shadcn.com/schema.json\",\n  \"style\": \"default\",\n  \"rsc\": true,\n  \"tsx\": true,\n  \"tailwind\": {\n"
  },
  {
    "path": "evi/evi-next-js-pages-router-quickstart/next.config.js",
    "chars": 54,
    "preview": "module.exports = {\n  transpilePackages: [\"geist\"],\n};\n"
  },
  {
    "path": "evi/evi-next-js-pages-router-quickstart/package.json",
    "chars": 1100,
    "preview": "{\n  \"name\": \"hume-evi-next-js-pages-router\",\n  \"version\": \"0.1.0\",\n  \"private\": true,\n  \"homepage\": \"https://github.com/"
  },
  {
    "path": "evi/evi-next-js-pages-router-quickstart/pages/500.tsx",
    "chars": 322,
    "preview": "export default function ErrorPage() {\n  return (\n    <div className={\"absolute inset-0 grid place-content-center\"}>\n    "
  },
  {
    "path": "evi/evi-next-js-pages-router-quickstart/pages/_app.tsx",
    "chars": 517,
    "preview": "import { Nav } from \"@/components/Nav\";\nimport type { AppProps } from \"next/app\";\nimport \"@/styles/globals.css\";\nimport "
  },
  {
    "path": "evi/evi-next-js-pages-router-quickstart/pages/_document.tsx",
    "chars": 308,
    "preview": "import { Html, Head, Main, NextScript } from \"next/document\";\nimport { cn } from \"@/utils\";\n\nexport default function Doc"
  },
  {
    "path": "evi/evi-next-js-pages-router-quickstart/pages/api/control-plane/set-llm-key.ts",
    "chars": 1104,
    "preview": "import type { NextApiRequest, NextApiResponse } from \"next\";\nimport { HumeClient } from \"hume\";\n\nconst hume = new HumeCl"
  },
  {
    "path": "evi/evi-next-js-pages-router-quickstart/pages/index.tsx",
    "chars": 1070,
    "preview": "import { fetchAccessToken } from \"hume\";\nimport { InferGetServerSidePropsType } from \"next\";\nimport dynamic from \"next/d"
  },
  {
    "path": "evi/evi-next-js-pages-router-quickstart/postcss.config.mjs",
    "chars": 146,
    "preview": "/** @type {import('postcss-load-config').Config} */\nconst config = {\n  plugins: {\n    \"@tailwindcss/postcss\": {},\n  },\n}"
  },
  {
    "path": "evi/evi-next-js-pages-router-quickstart/styles/globals.css",
    "chars": 2821,
    "preview": "@import \"tailwindcss\";\n\n/* Register theme tokens for Tailwind v4 so utilities like border-border, font-sans work */\n@the"
  },
  {
    "path": "evi/evi-next-js-pages-router-quickstart/tailwind.config.ts",
    "chars": 2406,
    "preview": "import type { Config } from \"tailwindcss\";\nimport defaultTheme from \"tailwindcss/defaultTheme\";\n\nconst config = {\n  dark"
  },
  {
    "path": "evi/evi-next-js-pages-router-quickstart/tsconfig.json",
    "chars": 667,
    "preview": "{\n  \"compilerOptions\": {\n    \"lib\": [\n      \"dom\",\n      \"dom.iterable\",\n      \"esnext\"\n    ],\n    \"allowJs\": true,\n    "
  },
  {
    "path": "evi/evi-next-js-pages-router-quickstart/utils/expressionColors.ts",
    "chars": 1491,
    "preview": "export const expressionColors = {\n  admiration: \"#ffc58f\",\n  adoration: \"#ffc6cc\",\n  aestheticAppreciation: \"#e2cbff\",\n "
  },
  {
    "path": "evi/evi-next-js-pages-router-quickstart/utils/index.ts",
    "chars": 169,
    "preview": "import { type ClassValue, clsx } from \"clsx\";\nimport { twMerge } from \"tailwind-merge\";\n\nexport function cn(...inputs: C"
  },
  {
    "path": "evi/evi-prompting-examples/README.md",
    "chars": 1499,
    "preview": "<div align=\"center\">\n  <img src=\"https://storage.googleapis.com/hume-public-logos/hume/hume-banner.png\">\n  <h1>Empathic "
  },
  {
    "path": "evi/evi-prompting-examples/deeper_questions_prompt.txt",
    "chars": 9938,
    "preview": "<role>\nAssistant is an empathic voice interface (EVI) built by Hume AI, a startup optimizing AI for human well-being. EV"
  },
  {
    "path": "evi/evi-prompting-examples/default_prompt.txt",
    "chars": 9899,
    "preview": "<role>\nAssistant is an empathic voice interface (EVI) built by Hume AI, a startup optimizing AI for human well-being. EV"
  },
  {
    "path": "evi/evi-prompting-examples/evi-3-default-prompt.txt",
    "chars": 3566,
    "preview": "<role> \nAssistant is an empathic voice interface (EVI) built by Hume AI, a startup optimizing AI for human well-being. E"
  },
  {
    "path": "evi/evi-python-chat-history/.gitignore",
    "chars": 16,
    "preview": ".env*.local\n.env"
  },
  {
    "path": "evi/evi-python-chat-history/README.md",
    "chars": 2797,
    "preview": "<div align=\"center\">\n  <img src=\"https://storage.googleapis.com/hume-public-logos/hume/hume-banner.png\">\n  <h1>Empathic "
  },
  {
    "path": "evi/evi-python-chat-history/main.py",
    "chars": 5171,
    "preview": "import asyncio\nimport json\nimport os\nfrom datetime import datetime\nfrom dotenv import load_dotenv\nfrom hume.client impor"
  },
  {
    "path": "evi/evi-python-chat-history/pyproject.toml",
    "chars": 400,
    "preview": "[tool.poetry]\nname = \"evi-python-chat-history\"\nversion = \"0.1.0\"\ndescription = \"\"\nauthors = [\"zach <hello@hume.ai>\"]\nrea"
  },
  {
    "path": "evi/evi-python-chat-history/transcript_4d720063-d4ab-4407-ad22-e41079373d79.txt",
    "chars": 1145,
    "preview": "[2024-12-17 12:01:08] User: Hello. How are you doing?\n[2024-12-17 12:01:09] Assistant: Hello! I'm here and ready to assi"
  },
  {
    "path": "evi/evi-python-clm-sse/README.md",
    "chars": 1250,
    "preview": "<div align=\"center\">\n  <img src=\"https://storage.googleapis.com/hume-public-logos/hume/hume-banner.png\">\n  <h1>Empathic "
  },
  {
    "path": "evi/evi-python-clm-sse/openai_sse.py",
    "chars": 2055,
    "preview": "from typing import AsyncIterable, Optional\nimport fastapi\nfrom fastapi.responses import StreamingResponse\nfrom openai.ty"
  },
  {
    "path": "evi/evi-python-clm-sse/pyproject.toml",
    "chars": 188,
    "preview": "[project]\nname = \"evi-python-clm-sse\"\nversion = \"0.1.0\"\ndescription = \"\"\nrequires-python = \">=3.11\"\ndependencies = [\n   "
  },
  {
    "path": "evi/evi-python-clm-wss/LICENSE",
    "chars": 1064,
    "preview": "MIT License\n\nCopyright (c) 2024 Hume AI\n\nPermission is hereby granted, free of charge, to any person obtaining a copy\nof"
  },
  {
    "path": "evi/evi-python-clm-wss/README.md",
    "chars": 7878,
    "preview": "<div align=\"center\">\n  <img src=\"https://storage.googleapis.com/hume-public-logos/hume/hume-banner.png\">\n  <h1>Empathic "
  },
  {
    "path": "evi/evi-python-clm-wss/docs/detailed-install-instructions-mac.md",
    "chars": 4136,
    "preview": "To install the prerequisites listed in the README on a Mac, you'll need to use some package managers. Here are all the s"
  },
  {
    "path": "evi/evi-python-clm-wss/main.py",
    "chars": 6419,
    "preview": "from fastapi import FastAPI, WebSocket\nimport json\nimport random\nimport uvicorn\nfrom typing import TypedDict, Dict, List"
  },
  {
    "path": "evi/evi-python-clm-wss/pyproject.toml",
    "chars": 541,
    "preview": "[project]\nname = \"evi-custom-language-model-demo\"\nversion = \"0.1.0\"\ndescription = \"\"\nauthors = [{name = \"Your Name\", ema"
  },
  {
    "path": "evi/evi-python-control-plane/LICENSE",
    "chars": 1064,
    "preview": "MIT License\n\nCopyright (c) 2024 Hume AI\n\nPermission is hereby granted, free of charge, to any person obtaining a copy\nof"
  },
  {
    "path": "evi/evi-python-control-plane/README.md",
    "chars": 2164,
    "preview": "<div align=\"center\">\n  <img src=\"https://storage.googleapis.com/hume-public-logos/hume/hume-banner.png\">\n  <h1>Empathic "
  },
  {
    "path": "evi/evi-python-control-plane/main.py",
    "chars": 15579,
    "preview": "\"\"\"\nEVI Control Plane Example\n\nThis example demonstrates how to use the EVI control plane to control and observe\nactive "
  },
  {
    "path": "evi/evi-python-control-plane/pyproject.toml",
    "chars": 257,
    "preview": "[project]\nname = \"evi-python-controlplane\"\nversion = \"0.1.0\"\ndescription = \"EVI Python control plane example\"\nreadme = \""
  },
  {
    "path": "evi/evi-python-function-calling/.gitignore",
    "chars": 16,
    "preview": ".env*.local\n.env"
  },
  {
    "path": "evi/evi-python-function-calling/LICENSE",
    "chars": 1064,
    "preview": "MIT License\n\nCopyright (c) 2024 Hume AI\n\nPermission is hereby granted, free of charge, to any person obtaining a copy\nof"
  },
  {
    "path": "evi/evi-python-function-calling/README.md",
    "chars": 7875,
    "preview": "<div align=\"center\">\n  <img src=\"https://storage.googleapis.com/hume-public-logos/hume/hume-banner.png\">\n  <h1>Empathic "
  },
  {
    "path": "evi/evi-python-function-calling/main.py",
    "chars": 15783,
    "preview": "import asyncio\nimport base64\nimport json\nimport os\nfrom dotenv import load_dotenv\nfrom typing import Union\nimport httpx\n"
  },
  {
    "path": "evi/evi-python-function-calling/utils.py",
    "chars": 819,
    "preview": "import datetime\n\ndef print_prompt(text: str) -> None:\n    \"\"\"Print a formatted message with a timestamp.\"\"\"\n    now = da"
  },
  {
    "path": "evi/evi-python-phone-calling-proxy-server/.gitignore",
    "chars": 22,
    "preview": ".env\nvenv/\n.reference/"
  },
  {
    "path": "evi/evi-python-phone-calling-proxy-server/README.md",
    "chars": 5599,
    "preview": "<div align=\"center\">\n  <img src=\"https://storage.googleapis.com/hume-public-logos/hume/hume-banner.png\">\n  <h1>Empathic "
  },
  {
    "path": "evi/evi-python-phone-calling-proxy-server/app.py",
    "chars": 11487,
    "preview": "import os\nimport asyncio\nimport json\nimport base64\nimport numpy as np\nfrom dotenv import load_dotenv\nfrom flask import F"
  },
  {
    "path": "evi/evi-python-phone-calling-proxy-server/audio_processors/__init__.py",
    "chars": 301,
    "preview": "\"\"\"Audio processors for converting between Twilio and EVI audio formats.\"\"\"\n\nfrom .twilio_audio_processor import TwilioA"
  }
]

// ... and 478 more files (download for full content)

About this extraction

This page contains the full source code of the HumeAI/hume-api-examples GitHub repository, extracted and formatted as plain text for AI agents and large language models (LLMs). The extraction includes 678 files (10.7 MB), approximately 2.8M tokens, and a symbol index with 787 extracted functions, classes, methods, constants, and types. Use this with OpenClaw, Claude, ChatGPT, Cursor, Windsurf, or any other AI tool that accepts text input. You can copy the full output to your clipboard or download it as a .txt file.

Extracted by GitExtract — a free GitHub-repo-to-text converter for AI. Built by Nikandr Surkov.

Copied to clipboard!