Repository: HumeAI/hume-api-examples
Branch: main
Commit: 3f42c8766603
Files: 678
Total size: 10.7 MB

Directory structure:
gitextract_vgmds2a9/
├── .github/
│   ├── dependabot.yml
│   └── workflows/
│       ├── dependabot-auto-merge.yml
│       └── test-examples.yml
├── .gitignore
├── Directory.Packages.props
├── LICENSE
├── README.md
├── evi/
│   ├── evi-dotnet-quickstart/
│   │   ├── .gitignore
│   │   ├── EviTests.cs
│   │   ├── Program.cs
│   │   ├── README.md
│   │   ├── evi-csharp-quickstart.csproj
│   │   ├── evi-csharp-quickstart.tests.csproj
│   │   └── sample_input.pcm
│   ├── evi-flutter/
│   │   ├── .gitignore
│   │   ├── README.md
│   │   ├── analysis_options.yaml
│   │   ├── android/
│   │   │   ├── .gitignore
│   │   │   ├── app/
│   │   │   │   ├── build.gradle
│   │   │   │   └── src/
│   │   │   │       ├── debug/
│   │   │   │       │   └── AndroidManifest.xml
│   │   │   │       ├── main/
│   │   │   │       │   ├── AndroidManifest.xml
│   │   │   │       │   ├── kotlin/
│   │   │   │       │   │   └── com/
│   │   │   │       │   │       └── example/
│   │   │   │       │   │           └── evi_example/
│   │   │   │       │   │               └── MainActivity.kt
│   │   │   │       │   └── res/
│   │   │   │       │       ├── drawable/
│   │   │   │       │       │   └── launch_background.xml
│   │   │   │       │       ├── drawable-v21/
│   │   │   │       │       │   └── launch_background.xml
│   │   │   │       │       ├── values/
│   │   │   │       │       │   └── styles.xml
│   │   │   │       │       └── values-night/
│   │   │   │       │           └── styles.xml
│   │   │   │       └── profile/
│   │   │   │           └── AndroidManifest.xml
│   │   │   ├── build.gradle
│   │   │   ├── gradle/
│   │   │   │   └── wrapper/
│   │   │   │       └── gradle-wrapper.properties
│   │   │   ├── gradle.properties
│   │   │   └── settings.gradle
│   │   ├── audio/
│   │   │   ├── .gitignore
│   │   │   ├── .metadata
│   │   │   ├── ios/
│   │   │   │   ├── .gitignore
│   │   │   │   ├── Assets/
│   │   │   │   │   └── .gitkeep
│   │   │   │   ├── Classes/
│   │   │   │   │   ├── AudioPlugin.swift
│   │   │   │   │   ├── Microphone.swift
│   │   │   │   │   └── SoundPlayer.swift
│   │   │   │   ├── Resources/
│   │   │   │   │   └── PrivacyInfo.xcprivacy
│   │   │   │   └── audio.podspec
│   │   │   ├── lib/
│   │   │   │   ├── audio.dart
│   │   │   │   ├── audio_method_channel.dart
│   │   │   │   ├── audio_platform_interface.dart
│   │   │   │   └── dart_audio.dart
│   │   │   ├── pubspec.yaml
│   │   │   └── test/
│   │   │       ├── audio_method_channel_test.dart
│   │   │       └── audio_test.dart
│   │   ├── ios/
│   │   │   ├── .gitignore
│   │   │   ├── Flutter/
│   │   │   │   ├── AppFrameworkInfo.plist
│   │   │   │   ├── Debug.xcconfig
│   │   │   │   └── Release.xcconfig
│   │   │   ├── Podfile
│   │   │   ├── Runner/
│   │   │   │   ├── AppDelegate.swift
│   │   │   │   ├── Assets.xcassets/
│   │   │   │   │   ├── AppIcon.appiconset/
│   │   │   │   │   │   └── Contents.json
│   │   │   │   │   └── LaunchImage.imageset/
│   │   │   │   │       ├── Contents.json
│   │   │   │   │       └── README.md
│   │   │   │   ├── Base.lproj/
│   │   │   │   │   ├── LaunchScreen.storyboard
│   │   │   │   │   └── Main.storyboard
│   │   │   │   ├── Info.plist
│   │   │   │   └── Runner-Bridging-Header.h
│   │   │   ├── Runner.xcodeproj/
│   │   │   │   ├── project.pbxproj
│   │   │   │   ├── project.xcworkspace/
│   │   │   │   │   ├── contents.xcworkspacedata
│   │   │   │   │   └── xcshareddata/
│   │   │   │   │       ├── IDEWorkspaceChecks.plist
│   │   │   │   │       └── WorkspaceSettings.xcsettings
│   │   │   │   └── xcshareddata/
│   │   │   │       └── xcschemes/
│   │   │   │           └── Runner.xcscheme
│   │   │   ├── Runner.xcworkspace/
│   │   │   │   ├── contents.xcworkspacedata
│   │   │   │   └── xcshareddata/
│   │   │   │       ├── IDEWorkspaceChecks.plist
│   │   │   │       └── WorkspaceSettings.xcsettings
│   │   │   ├── RunnerTests/
│   │   │   │   └── RunnerTests.swift
│   │   │   └── build/
│   │   │       └── ios/
│   │   │           └── XCBuildData/
│   │   │               └── PIFCache/
│   │   │                   └── workspace/
│   │   │                       └── WORKSPACE@v11_hash=(null)_subobjects=4483c0dac1d2a63621e8a5d74e580a19-json
│   │   ├── lib/
│   │   │   ├── chat_card.dart
│   │   │   ├── evi_message.dart
│   │   │   ├── main.dart
│   │   │   └── theme.dart
│   │   ├── pubspec.yaml
│   │   ├── test/
│   │   │   └── widget_test.dart
│   │   └── web/
│   │       ├── index.html
│   │       └── manifest.json
│   ├── evi-next-js-app-router-quickstart/
│   │   ├── .eslintrc.json
│   │   ├── .gitignore
│   │   ├── .prettierrc.json
│   │   ├── README.md
│   │   ├── app/
│   │   │   ├── actions/
│   │   │   │   └── set-llm-key.ts
│   │   │   ├── api-key/
│   │   │   │   └── page.tsx
│   │   │   ├── error.tsx
│   │   │   ├── globals.css
│   │   │   ├── layout.tsx
│   │   │   ├── page.tsx
│   │   │   └── session-settings/
│   │   │       └── page.tsx
│   │   ├── components/
│   │   │   ├── Chat.tsx
│   │   │   ├── ChatLoader.tsx
│   │   │   ├── Controls.tsx
│   │   │   ├── Expressions.tsx
│   │   │   ├── Messages.tsx
│   │   │   ├── MicFFT.tsx
│   │   │   ├── Nav.tsx
│   │   │   ├── StartCall.tsx
│   │   │   ├── logos/
│   │   │   │   ├── GitHub.tsx
│   │   │   │   └── Hume.tsx
│   │   │   └── ui/
│   │   │       ├── button.tsx
│   │   │       └── toggle.tsx
│   │   ├── components.json
│   │   ├── package.json
│   │   ├── playwright.config.ts
│   │   ├── postcss.config.mjs
│   │   ├── tailwind.config.ts
│   │   ├── test-results/
│   │   │   └── .last-run.json
│   │   ├── tests/
│   │   │   └── voice-react.spec.ts
│   │   ├── tsconfig.json
│   │   └── utils/
│   │       ├── e2e-hooks.ts
│   │       ├── expressionColors.ts
│   │       ├── index.ts
│   │       └── session-settings.ts
│   ├── evi-next-js-function-calling/
│   │   ├── .eslintrc.json
│   │   ├── .gitignore
│   │   ├── .prettierrc.json
│   │   ├── README.md
│   │   ├── app/
│   │   │   ├── api/
│   │   │   │   └── fetchWeather/
│   │   │   │       └── route.ts
│   │   │   ├── error.tsx
│   │   │   ├── globals.css
│   │   │   ├── layout.tsx
│   │   │   └── page.tsx
│   │   ├── components/
│   │   │   ├── Chat.tsx
│   │   │   ├── ChatLoader.tsx
│   │   │   ├── Controls.tsx
│   │   │   ├── Expressions.tsx
│   │   │   ├── Messages.tsx
│   │   │   ├── MicFFT.tsx
│   │   │   ├── Nav.tsx
│   │   │   ├── StartCall.tsx
│   │   │   ├── logos/
│   │   │   │   ├── GitHub.tsx
│   │   │   │   └── Hume.tsx
│   │   │   └── ui/
│   │   │       ├── button.tsx
│   │   │       └── toggle.tsx
│   │   ├── components.json
│   │   ├── package.json
│   │   ├── postcss.config.mjs
│   │   ├── tailwind.config.ts
│   │   ├── tsconfig.json
│   │   └── utils/
│   │       ├── expressionColors.ts
│   │       ├── fetchWeather.ts
│   │       └── index.ts
│   ├── evi-next-js-pages-router-quickstart/
│   │   ├── .eslintrc.json
│   │   ├── .gitignore
│   │   ├── .prettierrc.json
│   │   ├── README.md
│   │   ├── components/
│   │   │   ├── Chat.tsx
│   │   │   ├── Controls.tsx
│   │   │   ├── Expressions.tsx
│   │   │   ├── Messages.tsx
│   │   │   ├── MicFFT.tsx
│   │   │   ├── Nav.tsx
│   │   │   ├── StartCall.tsx
│   │   │   ├── logos/
│   │   │   │   ├── GitHub.tsx
│   │   │   │   └── Hume.tsx
│   │   │   └── ui/
│   │   │       ├── button.tsx
│   │   │       └── toggle.tsx
│   │   ├── components.json
│   │   ├── next.config.js
│   │   ├── package.json
│   │   ├── pages/
│   │   │   ├── 500.tsx
│   │   │   ├── _app.tsx
│   │   │   ├── _document.tsx
│   │   │   ├── api/
│   │   │   │   └── control-plane/
│   │   │   │       └── set-llm-key.ts
│   │   │   └── index.tsx
│   │   ├── postcss.config.mjs
│   │   ├── styles/
│   │   │   └── globals.css
│   │   ├── tailwind.config.ts
│   │   ├── tsconfig.json
│   │   └── utils/
│   │       ├── expressionColors.ts
│   │       └── index.ts
│   ├── evi-prompting-examples/
│   │   ├── README.md
│   │   ├── deeper_questions_prompt.txt
│   │   ├── default_prompt.txt
│   │   └── evi-3-default-prompt.txt
│   ├── evi-python-chat-history/
│   │   ├── .gitignore
│   │   ├── README.md
│   │   ├── main.py
│   │   ├── pyproject.toml
│   │   └── transcript_4d720063-d4ab-4407-ad22-e41079373d79.txt
│   ├── evi-python-clm-sse/
│   │   ├── README.md
│   │   ├── openai_sse.py
│   │   └── pyproject.toml
│   ├── evi-python-clm-wss/
│   │   ├── LICENSE
│   │   ├── README.md
│   │   ├── docs/
│   │   │   └── detailed-install-instructions-mac.md
│   │   ├── main.py
│   │   └── pyproject.toml
│   ├── evi-python-control-plane/
│   │   ├── LICENSE
│   │   ├── README.md
│   │   ├── main.py
│   │   └── pyproject.toml
│   ├── evi-python-function-calling/
│   │   ├── .gitignore
│   │   ├── LICENSE
│   │   ├── README.md
│   │   ├── main.py
│   │   └── utils.py
│   ├── evi-python-phone-calling-proxy-server/
│   │   ├── .gitignore
│   │   ├── README.md
│   │   ├── app.py
│   │   ├── audio_processors/
│   │   │   ├── __init__.py
│   │   │   ├── evi_audio_processor.py
│   │   │   └── twilio_audio_processor.py
│   │   ├── pyproject.toml
│   │   └── tools.py
│   ├── evi-python-quickstart/
│   │   ├── .gitignore
│   │   ├── LICENSE
│   │   ├── README.md
│   │   ├── conftest.py
│   │   ├── pyproject.toml
│   │   ├── quickstart.py
│   │   └── test_quickstart.py
│   ├── evi-python-raw-api/
│   │   ├── .gitignore
│   │   ├── LICENSE
│   │   ├── README.md
│   │   ├── requirements_linux.txt
│   │   ├── requirements_mac.txt
│   │   └── src/
│   │       ├── authenticator.py
│   │       ├── connection.py
│   │       ├── devices.py
│   │       └── main.py
│   ├── evi-python-webhooks/
│   │   ├── .gitignore
│   │   ├── README.md
│   │   ├── app.py
│   │   ├── pyproject.toml
│   │   └── utils.py
│   ├── evi-python-wss-clm-endpoint/
│   │   ├── .dockerignore
│   │   ├── Dockerfile
│   │   ├── README.md
│   │   ├── agent.py
│   │   ├── app.py
│   │   ├── cdk/
│   │   │   ├── README.md
│   │   │   ├── app.py
│   │   │   ├── cdk/
│   │   │   │   ├── __init__.py
│   │   │   │   └── eliza_stack.py
│   │   │   ├── cdk.json
│   │   │   ├── cdk.out/
│   │   │   │   ├── ElizaStack.assets.json
│   │   │   │   ├── ElizaStack.template.json
│   │   │   │   ├── asset.689e46f5ffafa1e0f81f114b5dfd7694d2d1e291d9bd855e4f7b601d2b2403d0/
│   │   │   │   │   ├── .dockerignore
│   │   │   │   │   ├── Dockerfile
│   │   │   │   │   ├── README.md
│   │   │   │   │   ├── agent.py
│   │   │   │   │   ├── app.py
│   │   │   │   │   ├── modal/
│   │   │   │   │   │   ├── README.md
│   │   │   │   │   │   └── modal_app.py
│   │   │   │   │   └── pyproject.toml
│   │   │   │   ├── asset.ee7de53d64cc9d6248fa6aa550f92358f6c907b5efd6f3298aeab1b5e7ea358a/
│   │   │   │   │   ├── __entrypoint__.js
│   │   │   │   │   └── index.js
│   │   │   │   ├── asset.f372550efb96be7f549f1d0346e8646080c1fe2b15c7c2e3b1dceb07b6656c54/
│   │   │   │   │   ├── .dockerignore
│   │   │   │   │   ├── Dockerfile
│   │   │   │   │   ├── README.md
│   │   │   │   │   ├── agent.py
│   │   │   │   │   ├── app.py
│   │   │   │   │   ├── modal_app.py
│   │   │   │   │   └── pyproject.toml
│   │   │   │   ├── cdk.out
│   │   │   │   ├── manifest.json
│   │   │   │   └── tree.json
│   │   │   └── requirements.txt
│   │   ├── modal/
│   │   │   ├── README.md
│   │   │   └── modal_app.py
│   │   ├── modal_app.py
│   │   └── pyproject.toml
│   ├── evi-react-native/
│   │   ├── .gitignore
│   │   ├── App.tsx
│   │   ├── README.md
│   │   ├── VoiceIsolationModePrompt.tsx
│   │   ├── app.json
│   │   ├── eslint.config.js
│   │   ├── index.ts
│   │   ├── metro.config.js
│   │   ├── modules/
│   │   │   └── audio/
│   │   │       ├── expo-module.config.json
│   │   │       ├── index.ts
│   │   │       └── src/
│   │   │           ├── AudioModule.ts
│   │   │           ├── AudioModule.types.ts
│   │   │           └── AudioModule.web.ts
│   │   ├── package.json
│   │   ├── polyfills.ts
│   │   └── tsconfig.json
│   ├── evi-swift-chat/
│   │   ├── .gitignore
│   │   ├── HumeDemo/
│   │   │   ├── Assets.xcassets/
│   │   │   │   ├── AccentColor.colorset/
│   │   │   │   │   └── Contents.json
│   │   │   │   ├── AppIcon.appiconset/
│   │   │   │   │   └── Contents.json
│   │   │   │   ├── Contents.json
│   │   │   │   └── Logo.imageset/
│   │   │   │       └── Contents.json
│   │   │   ├── EVIDemo/
│   │   │   │   ├── Clients/
│   │   │   │   │   └── AccessTokenClient.swift
│   │   │   │   ├── Extensions/
│   │   │   │   │   └── Dictionary+Additions.swift
│   │   │   │   ├── Mocks.swift
│   │   │   │   ├── Rows/
│   │   │   │   │   ├── DetailedRow.swift
│   │   │   │   │   └── MessageRow.swift
│   │   │   │   └── Views/
│   │   │   │       ├── Components/
│   │   │   │       │   ├── EventRowView.swift
│   │   │   │       │   └── RowView.swift
│   │   │   │       ├── EVIChatView.swift
│   │   │   │       ├── Models/
│   │   │   │       │   └── EVIChatModel.swift
│   │   │   │       └── Modifiers/
│   │   │   │           └── FlippedUpsideDown.swift
│   │   │   ├── HumeDemoApp.swift
│   │   │   ├── Info.plist
│   │   │   └── Preview Content/
│   │   │       ├── EVIChatModel+Previews.swift
│   │   │       └── Preview Assets.xcassets/
│   │   │           └── Contents.json
│   │   ├── HumeDemo.xcodeproj/
│   │   │   ├── project.pbxproj
│   │   │   └── xcshareddata/
│   │   │       └── xcschemes/
│   │   │           └── HumeDemo.xcscheme
│   │   ├── README.md
│   │   └── access_token_service/
│   │       ├── README.md
│   │       ├── requirements.txt
│   │       └── run_token_service.py
│   ├── evi-touchdesigner/
│   │   ├── .gitignore
│   │   ├── HumeTD.tox
│   │   ├── HumeTDDemo.toe
│   │   ├── README.md
│   │   └── Scripts/
│   │       ├── HumeTD.py
│   │       └── MessagePlaback.py
│   ├── evi-typescript-chat-history/
│   │   ├── .gitignore
│   │   ├── README.md
│   │   ├── package.json
│   │   ├── src/
│   │   │   └── index.ts
│   │   └── tsconfig.json
│   ├── evi-typescript-function-calling/
│   │   ├── .gitignore
│   │   ├── README.md
│   │   ├── index.html
│   │   ├── package.json
│   │   ├── src/
│   │   │   ├── handleToolCall.ts
│   │   │   ├── main.ts
│   │   │   ├── styles.css
│   │   │   └── vite-env.d.ts
│   │   └── tsconfig.json
│   ├── evi-typescript-proxy/
│   │   ├── .gitignore
│   │   ├── README.md
│   │   ├── app/
│   │   │   ├── api.ts
│   │   │   ├── cli.ts
│   │   │   ├── downstream.ts
│   │   │   ├── main.ts
│   │   │   ├── package.json
│   │   │   ├── tsconfig.json
│   │   │   ├── upstream.ts
│   │   │   └── util.ts
│   │   ├── shared/
│   │   │   └── types.mts
│   │   └── web/
│   │       ├── .gitignore
│   │       ├── ChatControls.tsx
│   │       ├── ChatMessages.tsx
│   │       ├── EVIChat.tsx
│   │       ├── StartCall.tsx
│   │       ├── WebSocketControls.tsx
│   │       ├── app.tsx
│   │       ├── index.html
│   │       ├── package.json
│   │       ├── styles.css
│   │       ├── tsconfig.json
│   │       └── useProxyState.ts
│   ├── evi-typescript-quickstart/
│   │   ├── .gitignore
│   │   ├── .prettierrc.json
│   │   ├── README.md
│   │   ├── index.html
│   │   ├── package.json
│   │   ├── src/
│   │   │   ├── lib/
│   │   │   │   ├── audio.ts
│   │   │   │   ├── evi.test.ts
│   │   │   │   ├── evi.ts
│   │   │   │   ├── index.ts
│   │   │   │   └── ui.ts
│   │   │   ├── main.ts
│   │   │   ├── styles/
│   │   │   │   └── globals.css
│   │   │   └── vite-env.d.ts
│   │   ├── tsconfig.json
│   │   └── vitest.config.ts
│   ├── evi-typescript-webhooks/
│   │   ├── .gitignore
│   │   ├── README.md
│   │   ├── package.json
│   │   ├── src/
│   │   │   ├── main.ts
│   │   │   └── util.ts
│   │   └── tsconfig.json
│   ├── evi-unity-quickstart/
│   │   ├── .gitignore
│   │   ├── Assets/
│   │   │   ├── DefaultScene.unity
│   │   │   ├── DefaultScene.unity.meta
│   │   │   ├── Editor/
│   │   │   │   └── AutoLoadDefaultScene.cs
│   │   │   ├── Plugins/
│   │   │   │   ├── Microsoft.Extensions.DependencyInjection.Abstractions.dll.meta
│   │   │   │   ├── Microsoft.Extensions.Logging.Abstractions.dll.meta
│   │   │   │   └── Microsoft.IO.RecyclableMemoryStream.dll.meta
│   │   │   ├── Plugins.meta
│   │   │   ├── Scripts/
│   │   │   │   ├── HumeEVI.cs
│   │   │   │   ├── HumeEVI.cs.meta
│   │   │   │   ├── SceneBuilder.cs
│   │   │   │   └── SceneBuilder.cs.meta
│   │   │   └── Scripts.meta
│   │   ├── Packages/
│   │   │   └── manifest.json
│   │   ├── ProjectSettings/
│   │   │   ├── AudioManager.asset
│   │   │   ├── ClusterInputManager.asset
│   │   │   ├── DynamicsManager.asset
│   │   │   ├── EditorBuildSettings.asset
│   │   │   ├── EditorSettings.asset
│   │   │   ├── GraphicsSettings.asset
│   │   │   ├── InputManager.asset
│   │   │   ├── MemorySettings.asset
│   │   │   ├── MultiplayerManager.asset
│   │   │   ├── NavMeshAreas.asset
│   │   │   ├── PackageManagerSettings.asset
│   │   │   ├── Physics2DSettings.asset
│   │   │   ├── PresetManager.asset
│   │   │   ├── ProjectSettings.asset
│   │   │   ├── ProjectVersion.txt
│   │   │   ├── QualitySettings.asset
│   │   │   ├── SceneTemplateSettings.json
│   │   │   ├── TagManager.asset
│   │   │   ├── TimeManager.asset
│   │   │   ├── UnityConnectSettings.asset
│   │   │   ├── VFXManager.asset
│   │   │   └── VersionControlSettings.asset
│   │   └── README.md
│   └── evi-vue-widget/
│       ├── .gitignore
│       ├── .nvmrc
│       ├── README.md
│       ├── index.html
│       ├── package.json
│       ├── src/
│       │   ├── App.vue
│       │   ├── components/
│       │   │   └── HumeEmbed.vue
│       │   ├── main.ts
│       │   ├── style.css
│       │   └── vite-env.d.ts
│       ├── tsconfig.json
│       ├── tsconfig.node.json
│       └── vite.config.ts
├── expression-measurement/
│   ├── batch/
│   │   ├── next-js-emotional-language/
│   │   │   ├── .env.example
│   │   │   ├── .eslintrc.json
│   │   │   ├── .gitignore
│   │   │   ├── .prettierrc
│   │   │   ├── README.md
│   │   │   ├── next.config.js
│   │   │   ├── package.json
│   │   │   ├── postcss.config.js
│   │   │   ├── src/
│   │   │   │   ├── components/
│   │   │   │   │   ├── Introduction.tsx
│   │   │   │   │   ├── TextRender.tsx
│   │   │   │   │   └── Tooltip.tsx
│   │   │   │   ├── lib/
│   │   │   │   │   ├── client.ts
│   │   │   │   │   ├── env.ts
│   │   │   │   │   ├── mutations/
│   │   │   │   │   │   └── processTextFile.ts
│   │   │   │   │   ├── schemas/
│   │   │   │   │   │   └── index.ts
│   │   │   │   │   └── utils.ts
│   │   │   │   ├── pages/
│   │   │   │   │   ├── _app.tsx
│   │   │   │   │   ├── _document.tsx
│   │   │   │   │   ├── api/
│   │   │   │   │   │   ├── results.ts
│   │   │   │   │   │   └── send.ts
│   │   │   │   │   └── index.tsx
│   │   │   │   └── styles/
│   │   │   │       └── globals.css
│   │   │   ├── tailwind.config.js
│   │   │   └── tsconfig.json
│   │   ├── python-top-emotions/
│   │   │   ├── README.md
│   │   │   └── top_emotions.py
│   │   └── typescript-raw-text-processor/
│   │       ├── .gitignore
│   │       ├── README.md
│   │       ├── package.json
│   │       ├── src/
│   │       │   ├── index.test.ts
│   │       │   └── index.ts
│   │       └── tsconfig.json
│   ├── streaming/
│   │   ├── next-js-streaming-example/
│   │   │   ├── .gitignore
│   │   │   ├── README.md
│   │   │   ├── components/
│   │   │   │   ├── inputs/
│   │   │   │   │   ├── Button.tsx
│   │   │   │   │   ├── TextArea.tsx
│   │   │   │   │   └── TextBox.tsx
│   │   │   │   ├── menu/
│   │   │   │   │   ├── Auth.tsx
│   │   │   │   │   ├── Login.tsx
│   │   │   │   │   ├── Nav.tsx
│   │   │   │   │   └── Toolbar.tsx
│   │   │   │   └── widgets/
│   │   │   │       ├── AudioWidgets.tsx
│   │   │   │       ├── BurstWidgets.tsx
│   │   │   │       ├── Descriptor.tsx
│   │   │   │       ├── DiscreteTimeline.tsx
│   │   │   │       ├── FaceTrackedVideo.tsx
│   │   │   │       ├── FaceWidgets.tsx
│   │   │   │       ├── LanguageWidgets.tsx
│   │   │   │       ├── Loader.tsx
│   │   │   │       ├── LoaderSet.tsx
│   │   │   │       ├── ProsodyWidgets.tsx
│   │   │   │       └── TopEmotions.tsx
│   │   │   ├── lib/
│   │   │   │   ├── data/
│   │   │   │   │   ├── audioPrediction.ts
│   │   │   │   │   ├── boundingBox.ts
│   │   │   │   │   ├── characterRange.ts
│   │   │   │   │   ├── embedding.ts
│   │   │   │   │   ├── emotion.ts
│   │   │   │   │   ├── facePrediction.ts
│   │   │   │   │   ├── languagePrediction.ts
│   │   │   │   │   ├── range.ts
│   │   │   │   │   ├── timeRange.ts
│   │   │   │   │   └── trackedFace.ts
│   │   │   │   ├── hooks/
│   │   │   │   │   ├── keyPress.ts
│   │   │   │   │   ├── stability.ts
│   │   │   │   │   └── storage.ts
│   │   │   │   ├── media/
│   │   │   │   │   ├── audioRecorder.ts
│   │   │   │   │   └── videoRecorder.ts
│   │   │   │   └── utilities/
│   │   │   │       ├── asyncUtilities.ts
│   │   │   │       ├── blobUtilities.ts
│   │   │   │       ├── embeddingUtilities.ts
│   │   │   │       ├── emotionUtilities.ts
│   │   │   │       ├── environmentUtilities.ts
│   │   │   │       ├── scalingUtilities.ts
│   │   │   │       ├── styleUtilities.ts
│   │   │   │       └── typeUtilities.ts
│   │   │   ├── next.config.js
│   │   │   ├── package.json
│   │   │   ├── pages/
│   │   │   │   ├── _app.tsx
│   │   │   │   ├── burst/
│   │   │   │   │   ├── index.tsx
│   │   │   │   │   └── timeline/
│   │   │   │   │       └── index.tsx
│   │   │   │   ├── face/
│   │   │   │   │   ├── calibrate/
│   │   │   │   │   │   └── index.tsx
│   │   │   │   │   └── index.tsx
│   │   │   │   ├── index.tsx
│   │   │   │   ├── language/
│   │   │   │   │   └── index.tsx
│   │   │   │   └── prosody/
│   │   │   │       └── index.tsx
│   │   │   ├── postcss.config.js
│   │   │   ├── styles/
│   │   │   │   └── globals.css
│   │   │   ├── tailwind.config.js
│   │   │   └── tsconfig.json
│   │   └── python-streaming-example/
│   │       ├── .gitignore
│   │       ├── README.md
│   │       ├── main.py
│   │       ├── pyproject.toml
│   │       └── test_main.py
│   └── visualization-example/
│       ├── example-notebook.ipynb
│       └── predictions.json
├── monorepo.code-workspace
└── tts/
    ├── tts-dotnet-quickstart/
    │   ├── .gitignore
    │   ├── Program.cs
    │   ├── README.md
    │   ├── StreamingTtsService.cs
    │   ├── TtsTests.cs
    │   ├── tts-csharp-quickstart.csproj
    │   └── tts-csharp-quickstart.tests.csproj
    ├── tts-next-js-agora/
    │   ├── .gitignore
    │   ├── README.md
    │   ├── app/
    │   │   ├── api/
    │   │   │   ├── generate-agora-token/
    │   │   │   │   └── route.ts
    │   │   │   ├── invite-agent/
    │   │   │   │   └── route.ts
    │   │   │   └── stop-conversation/
    │   │   │       └── route.ts
    │   │   ├── globals.css
    │   │   ├── layout.tsx
    │   │   └── page.tsx
    │   ├── components/
    │   │   ├── AudioVisualizer.tsx
    │   │   ├── ConversationComponent.tsx
    │   │   ├── ConvoTextStream.tsx
    │   │   └── MicrophoneButton.tsx
    │   ├── env.example
    │   ├── eslint.config.mjs
    │   ├── lib/
    │   │   └── message.ts
    │   ├── next.config.ts
    │   ├── package.json
    │   ├── tsconfig.json
    │   └── types/
    │       ├── agora-rtc-react.d.ts
    │       ├── agora-token.d.ts
    │       └── conversation.ts
    ├── tts-next-js-chat/
    │   ├── .gitignore
    │   ├── README.md
    │   ├── eslint.config.mjs
    │   ├── next.config.ts
    │   ├── package.json
    │   ├── postcss.config.mjs
    │   ├── src/
    │   │   └── app/
    │   │       ├── api/
    │   │       │   ├── chat/
    │   │       │   │   └── route.ts
    │   │       │   ├── transcribe/
    │   │       │   │   └── route.ts
    │   │       │   ├── tts/
    │   │       │   │   └── route.ts
    │   │       │   └── voices/
    │   │       │       └── route.ts
    │   │       ├── components/
    │   │       │   ├── AudioPlayer.tsx
    │   │       │   ├── Chat.tsx
    │   │       │   ├── ControlsPanel.tsx
    │   │       │   ├── VoiceSelector.tsx
    │   │       │   └── logos/
    │   │       │       └── Hume.tsx
    │   │       ├── context/
    │   │       │   └── VoiceSettingsContext.tsx
    │   │       ├── globals.css
    │   │       ├── hooks/
    │   │       │   ├── useRecording.ts
    │   │       │   ├── useTts.ts
    │   │       │   └── useVoices.ts
    │   │       ├── layout.tsx
    │   │       ├── lib/
    │   │       │   └── humeClient.ts
    │   │       └── page.tsx
    │   └── tsconfig.json
    ├── tts-next-js-vercel-ai-sdk/
    │   ├── .gitignore
    │   ├── README.md
    │   ├── next.config.ts
    │   ├── package.json
    │   ├── postcss.config.mjs
    │   ├── src/
    │   │   ├── actions/
    │   │   │   ├── generate-speech.ts
    │   │   │   └── list-voices.ts
    │   │   ├── app/
    │   │   │   ├── globals.css
    │   │   │   ├── layout.tsx
    │   │   │   └── page.tsx
    │   │   ├── components/
    │   │   │   ├── AudioClipCard.tsx
    │   │   │   ├── AudioGallery.tsx
    │   │   │   ├── TextAreaField.tsx
    │   │   │   ├── TtsForm.tsx
    │   │   │   ├── VoiceSelect.tsx
    │   │   │   └── logos/
    │   │   │       └── Hume.tsx
    │   │   ├── hooks/
    │   │   │   └── useVoices.ts
    │   │   └── types/
    │   │       └── clip.ts
    │   └── tsconfig.json
    ├── tts-python-livekit/
    │   ├── .gitignore
    │   ├── .python-version
    │   ├── README.md
    │   ├── pyproject.toml
    │   └── src/
    │       ├── __init__.py
    │       ├── agent_session/
    │       │   ├── __init__.py
    │       │   ├── constants.py
    │       │   └── main.py
    │       ├── standalone_tts/
    │       │   ├── __init__.py
    │       │   └── main.py
    │       └── utils.py
    ├── tts-python-quickstart/
    │   ├── .gitignore
    │   ├── README.md
    │   ├── app.py
    │   ├── conftest.py
    │   ├── pyproject.toml
    │   └── test_app.py
    ├── tts-swift-quickstart/
    │   ├── .gitignore
    │   ├── HumeDemo/
    │   │   ├── Assets.xcassets/
    │   │   │   ├── AccentColor.colorset/
    │   │   │   │   └── Contents.json
    │   │   │   ├── AppIcon.appiconset/
    │   │   │   │   └── Contents.json
    │   │   │   ├── Contents.json
    │   │   │   └── Logo.imageset/
    │   │   │       └── Contents.json
    │   │   ├── HumeDemoApp.swift
    │   │   ├── Info.plist
    │   │   ├── Preview Content/
    │   │   │   └── Preview Assets.xcassets/
    │   │   │       └── Contents.json
    │   │   └── TTSDemo/
    │   │       ├── Clients/
    │   │       │   └── AccessTokenClient.swift
    │   │       ├── Extensions/
    │   │       │   └── Dictionary+Additions.swift
    │   │       ├── Mocks.swift
    │   │       └── Views/
    │   │           ├── Components/
    │   │           │   ├── RowView.swift
    │   │           │   └── TTSEventView.swift
    │   │           ├── Models/
    │   │           │   ├── TTSEvent.swift
    │   │           │   ├── TTSModel+Types.swift
    │   │           │   └── TTSModel.swift
    │   │           ├── Modifiers/
    │   │           │   └── FlippedUpsideDown.swift
    │   │           └── TTSView.swift
    │   ├── HumeDemo.xcodeproj/
    │   │   ├── project.pbxproj
    │   │   └── xcshareddata/
    │   │       └── xcschemes/
    │   │           └── HumeDemo.xcscheme
    │   ├── README.md
    │   └── access_token_service/
    │       ├── README.md
    │       ├── requirements.txt
    │       └── run_token_service.py
    ├── tts-typescript-lipsync/
    │   ├── .gitignore
    │   ├── README.md
    │   ├── index.html
    │   ├── index.tsx
    │   ├── mouth.ts
    │   ├── package.json
    │   ├── tsconfig.json
    │   └── vite-env.d.ts
    ├── tts-typescript-quickstart/
    │   ├── .gitignore
    │   ├── README.md
    │   ├── audio_player.ts
    │   ├── index.test.ts
    │   ├── index.ts
    │   ├── package.json
    │   ├── pnpm-workspace.yaml
    │   ├── tsconfig.json
    │   ├── vite-env.d.ts
    │   ├── vite.config.ts
    │   └── vitest.config.ts
    └── tts-unity-quickstart/
        ├── .gitignore
        ├── Assets/
        │   ├── DefaultScene.unity
        │   ├── DefaultScene.unity.meta
        │   ├── Scripts/
        │   │   ├── HumeSpeaker.cs
        │   │   ├── HumeSpeaker.cs.meta
        │   │   ├── SceneBuilder.cs
        │   │   └── SceneBuilder.cs.meta
        │   └── Scripts.meta
        ├── Packages/
        │   └── manifest.json
        ├── ProjectSettings/
        │   ├── AudioManager.asset
        │   ├── ClusterInputManager.asset
        │   ├── DynamicsManager.asset
        │   ├── EditorBuildSettings.asset
        │   ├── EditorSettings.asset
        │   ├── GraphicsSettings.asset
        │   ├── InputManager.asset
        │   ├── MemorySettings.asset
        │   ├── MultiplayerManager.asset
        │   ├── NavMeshAreas.asset
        │   ├── PackageManagerSettings.asset
        │   ├── Physics2DSettings.asset
        │   ├── PresetManager.asset
        │   ├── ProjectSettings.asset
        │   ├── ProjectVersion.txt
        │   ├── QualitySettings.asset
        │   ├── SceneTemplateSettings.json
        │   ├── TagManager.asset
        │   ├── TimeManager.asset
        │   ├── UnityConnectSettings.asset
        │   ├── VFXManager.asset
        │   └── VersionControlSettings.asset
        └── README.md

================================================
FILE CONTENTS
================================================

================================================
FILE: .github/dependabot.yml
================================================
version: 2
updates:
  # check for updated versions of github actions on a weekly basis
  - package-ecosystem: 'github-actions'
    directory: '/'
    schedule:
      interval: 'weekly'
      day: 'monday'
      time: '06:00'
      timezone: 'America/New_York'
    commit-message:
      prefix: '[github actions] '
    open-pull-requests-limit: 20

  # check for updated versions of npm dependencies on a daily basis
  - package-ecosystem: 'npm'
    directories:
      - '/evi/evi-next-js-app-router-quickstart'
      - '/evi/evi-next-js-function-calling'
      - '/evi/evi-next-js-pages-router-quickstart'
      - '/evi/evi-typescript-chat-history'
      - '/evi/evi-typescript-function-calling'
      - '/evi/evi-typescript-quickstart'
      - '/evi/evi-typescript-webhooks'
      - '/evi/evi-vue-widget'
      - '/tts/tts-next-js-agora'
      - '/tts/tts-next-js-vercel-ai-sdk'
      - '/tts/tts-typescript-lipsync'
      - '/tts/tts-typescript-quickstart'
    schedule:
      interval: 'daily'
      time: '06:00'
      timezone: 'America/New_York'
    commit-message:
      prefix: '[npm] '
    open-pull-requests-limit: 20
    versioning-strategy: 'increase'

  # check for updated versions of NuGet (.NET) dependencies on a daily basis
  - package-ecosystem: 'nuget'
    directories:
      - '/evi/evi-dotnet-quickstart'
      - '/tts/tts-dotnet-quickstart'
    schedule:
      interval: 'daily'
      time: '06:00'
      timezone: 'America/New_York'
    commit-message:
      prefix: '[nuget] '
    open-pull-requests-limit: 20

  # check for updated versions of pip dependencies on a daily basis
  # (excludes uv-based projects, see below)
  # pip ecosystem doesn't update uv.lock
  - package-ecosystem: 'pip'
    directories:
      - '/evi/evi-python-chat-history'
      - '/evi/evi-python-webhooks'
      - '/evi/evi-python-wss-clm-endpoint'
    schedule:
      interval: 'daily'
      time: '06:00'
      timezone: 'America/New_York'
    commit-message:
      prefix: '[pip] '
    open-pull-requests-limit: 20

  # uv-based Python projects (pyproject.toml + uv.lock)
  - package-ecosystem: 'uv'
    directories:
      - '/evi/evi-python-quickstart'
      - '/evi/evi-python-clm-sse'
      - '/evi/evi-python-clm-wss'
      - '/evi/evi-python-control-plane'
      - '/evi/evi-python-phone-calling-proxy-server'
      - '/tts/tts-python-quickstart'
      - '/tts/tts-python-livekit'
      - '/expression-measurement/streaming/python-streaming-example'
    schedule:
      interval: 'daily'
      time: '06:00'
      timezone: 'America/New_York'
    commit-message:
      prefix: '[uv] '
    open-pull-requests-limit: 20
================================================
FILE: .github/workflows/dependabot-auto-merge.yml
================================================
name: Dependabot auto-merge

on:
  pull_request_target:
    types: [opened, reopened, ready_for_review, synchronize]

permissions:
  contents: write
  pull-requests: write

jobs:
  dependabot-auto-merge:
    runs-on: ubuntu-latest
    if: github.actor == 'dependabot[bot]'
    steps:
      - name: Dependabot metadata
        id: metadata
        uses: dependabot/fetch-metadata@v3.1.0
        with:
          github-token: ${{ secrets.GITHUB_TOKEN }}
      - name: Approve PR
        env:
          PR_URL: ${{ github.event.pull_request.html_url }}
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        run: gh pr review --approve "$PR_URL"
      - name: Enable auto-merge for Dependabot PRs
        env:
          PR_URL: ${{ github.event.pull_request.html_url }}
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        run: gh pr merge --auto --squash "$PR_URL"
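The two `run` steps above lean on the `gh` CLI; for readers who want the underlying API calls, approving is a single REST request while enabling auto-merge is GraphQL-only. A rough TypeScript sketch of the same flow (assuming the `octokit` package and a token with `repo` scope; the workflow itself only shells out to `gh`):

```ts
// Sketch of what "Approve PR" and "Enable auto-merge" do via the GitHub API.
import { Octokit } from "octokit";

async function approveAndEnableAutoMerge(
  octokit: Octokit,
  owner: string,
  repo: string,
  pullNumber: number,
): Promise<void> {
  // Equivalent of: gh pr review --approve "$PR_URL"
  await octokit.rest.pulls.createReview({
    owner,
    repo,
    pull_number: pullNumber,
    event: "APPROVE",
  });

  // Equivalent of: gh pr merge --auto --squash "$PR_URL".
  // Auto-merge has no REST endpoint, so look up the PR node id and
  // call the GraphQL mutation.
  const pr = await octokit.rest.pulls.get({ owner, repo, pull_number: pullNumber });
  await octokit.graphql(
    `mutation ($prId: ID!) {
       enablePullRequestAutoMerge(input: { pullRequestId: $prId, mergeMethod: SQUASH }) {
         pullRequest { number }
       }
     }`,
    { prId: pr.data.node_id },
  );
}
```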
================================================
FILE: .github/workflows/test-examples.yml
================================================
name: test-examples

on:
  pull_request:
    types: [opened, synchronize, reopened]
  push:
    branches: [main, master]
  workflow_dispatch:
  schedule:
    # Run tests for all packages that have tests weekly on Tuesday
    - cron: '0 12 * * 2'

permissions:
  contents: read

jobs:
  detect-changes:
    runs-on: ubuntu-latest
    outputs:
      evi_py_quickstart: ${{ steps.filter.outputs.evi_py_quickstart }}
      evi_py_chat_history: ${{ steps.filter.outputs.evi_py_chat_history }}
      evi_py_clm_sse: ${{ steps.filter.outputs.evi_py_clm_sse }}
      evi_py_clm_wss: ${{ steps.filter.outputs.evi_py_clm_wss }}
      evi_py_control_plane: ${{ steps.filter.outputs.evi_py_control_plane }}
      evi_py_phone_calling: ${{ steps.filter.outputs.evi_py_phone_calling }}
      evi_py_webhooks: ${{ steps.filter.outputs.evi_py_webhooks }}
      evi_py_wss_clm_endpoint: ${{ steps.filter.outputs.evi_py_wss_clm_endpoint }}
      tts_py_livekit: ${{ steps.filter.outputs.tts_py_livekit }}
      tts_py_quickstart: ${{ steps.filter.outputs.tts_py_quickstart }}
      evi_app_router: ${{ steps.filter.outputs.evi_app_router }}
      evi_function_calling: ${{ steps.filter.outputs.evi_function_calling }}
      evi_pages_router: ${{ steps.filter.outputs.evi_pages_router }}
      evi_react_native: ${{ steps.filter.outputs.evi_react_native }}
      evi_ts_chat_history: ${{ steps.filter.outputs.evi_ts_chat_history }}
      evi_ts_function_calling: ${{ steps.filter.outputs.evi_ts_function_calling }}
      evi_ts_quickstart: ${{ steps.filter.outputs.evi_ts_quickstart }}
      evi_ts_webhooks: ${{ steps.filter.outputs.evi_ts_webhooks }}
      evi_vue_widget: ${{ steps.filter.outputs.evi_vue_widget }}
      tts_next_agora: ${{ steps.filter.outputs.tts_next_agora }}
      tts_next_vercel_ai_sdk: ${{ steps.filter.outputs.tts_next_vercel_ai_sdk }}
      tts_ts_lipsync: ${{ steps.filter.outputs.tts_ts_lipsync }}
      tts_ts_quickstart: ${{ steps.filter.outputs.tts_ts_quickstart }}
      evi_dotnet_quickstart: ${{ steps.filter.outputs.evi_dotnet_quickstart }}
      tts_dotnet_quickstart: ${{ steps.filter.outputs.tts_dotnet_quickstart }}
      exp_meas_ts_raw_text_processor: ${{ steps.filter.outputs.exp_meas_ts_raw_text_processor }}
      exp_meas_py_streaming: ${{ steps.filter.outputs.exp_meas_py_streaming }}
    steps:
      - name: Checkout
        uses: actions/checkout@v6
        with:
          fetch-depth: 0
      - name: Detect changed folders
        id: filter
        uses: dorny/paths-filter@v4
        with:
          filters: |
            evi_py_quickstart:
              - 'evi/evi-python-quickstart/**'
            evi_py_chat_history:
              - 'evi/evi-python-chat-history/**'
            evi_py_clm_sse:
              - 'evi/evi-python-clm-sse/**'
            evi_py_clm_wss:
              - 'evi/evi-python-clm-wss/**'
            evi_py_control_plane:
              - 'evi/evi-python-control-plane/**'
            evi_py_phone_calling:
              - 'evi/evi-python-phone-calling-proxy-server/**'
            evi_py_webhooks:
              - 'evi/evi-python-webhooks/**'
            evi_py_wss_clm_endpoint:
              - 'evi/evi-python-wss-clm-endpoint/**'
            tts_py_livekit:
              - 'tts/tts-python-livekit/**'
            tts_py_quickstart:
              - 'tts/tts-python-quickstart/**'
            evi_app_router:
              - 'evi/evi-next-js-app-router-quickstart/**'
            evi_function_calling:
              - 'evi/evi-next-js-function-calling/**'
            evi_pages_router:
              - 'evi/evi-next-js-pages-router-quickstart/**'
            evi_react_native:
              - 'evi/evi-react-native/**'
            evi_ts_chat_history:
              - 'evi/evi-typescript-chat-history/**'
            evi_ts_function_calling:
              - 'evi/evi-typescript-function-calling/**'
            evi_ts_quickstart:
              - 'evi/evi-typescript-quickstart/**'
            evi_ts_webhooks:
              - 'evi/evi-typescript-webhooks/**'
            evi_vue_widget:
              - 'evi/evi-vue-widget/**'
            tts_next_agora:
              - 'tts/tts-next-js-agora/**'
            tts_next_vercel_ai_sdk:
              - 'tts/tts-next-js-vercel-ai-sdk/**'
            tts_ts_lipsync:
              - 'tts/tts-typescript-lipsync/**'
            tts_ts_quickstart:
              - 'tts/tts-typescript-quickstart/**'
            evi_dotnet_quickstart:
              - 'evi/evi-dotnet-quickstart/**'
            tts_dotnet_quickstart:
              - 'tts/tts-dotnet-quickstart/**'
            exp_meas_ts_raw_text_processor:
              - 'expression-measurement/batch/typescript-raw-text-processor/**'
            exp_meas_py_streaming:
              - 'expression-measurement/streaming/python-streaming-example/**'

  test-evi-typescript-quickstart:
    needs: detect-changes
    if: github.event_name == 'workflow_dispatch' || github.event_name == 'schedule' || needs.detect-changes.outputs.evi_ts_quickstart == 'true'
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v6
      - name: Setup pnpm
        uses: pnpm/action-setup@v6
        with:
          version: 10.20.0
      - name: Setup Node
        uses: actions/setup-node@v6
        with:
          node-version: 20
      - name: Install deps (EVI quickstart)
        working-directory: evi/evi-typescript-quickstart
        run: pnpm install --frozen-lockfile
      - name: Build
        working-directory: evi/evi-typescript-quickstart
        run: pnpm run build
      - name: Run tests in evi-typescript-quickstart
        working-directory: evi/evi-typescript-quickstart
        env:
          TEST_HUME_API_KEY: ${{ secrets.TEST_HUME_API_KEY }}
          TEST_HUME_SECRET_KEY: ${{ secrets.TEST_HUME_SECRET_KEY }}
        run: pnpm run test

  test-tts-typescript-quickstart:
    needs: detect-changes
    if: github.event_name == 'workflow_dispatch' || github.event_name == 'schedule' || needs.detect-changes.outputs.tts_ts_quickstart == 'true'
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v6
      - name: Setup pnpm
        uses: pnpm/action-setup@v6
        with:
          version: 10.20.0
      - name: Setup Node
        uses: actions/setup-node@v6
        with:
          node-version: 20
      - name: Install deps (TTS quickstart)
        working-directory: tts/tts-typescript-quickstart
        run: pnpm install --frozen-lockfile
      - name: Build
        working-directory: tts/tts-typescript-quickstart
        run: pnpm run build
      - name: Run tests in tts-typescript-quickstart
        working-directory: tts/tts-typescript-quickstart
        env:
          TEST_HUME_API_KEY: ${{ secrets.TEST_HUME_API_KEY }}
          TEST_HUME_SECRET_KEY: ${{ secrets.TEST_HUME_SECRET_KEY }}
        run: pnpm run test

  test-exp-meas-typescript-raw-text-processor:
    needs: detect-changes
    if: github.event_name == 'workflow_dispatch' || github.event_name == 'schedule' || needs.detect-changes.outputs.exp_meas_ts_raw_text_processor == 'true'
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v6
      - name: Setup Node
        uses: actions/setup-node@v6
        with:
          node-version: 20
      - name: Setup pnpm
        uses: pnpm/action-setup@v6
        with:
          version: 10.20.0
      - name: Install deps (expression-measurement batch typescript-raw-text-processor)
        working-directory: expression-measurement/batch/typescript-raw-text-processor
        run: pnpm install --frozen-lockfile
      - name: Run tests in typescript-raw-text-processor
        working-directory: expression-measurement/batch/typescript-raw-text-processor
        env:
          HUME_API_KEY: ${{ secrets.TEST_HUME_API_KEY_EXP_MEASUREMENT }}
        run: pnpm run test

  test-exp-meas-python-streaming:
    needs: detect-changes
    if: github.event_name == 'workflow_dispatch' || github.event_name == 'schedule' || needs.detect-changes.outputs.exp_meas_py_streaming == 'true'
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v6
      - name: Install uv
        run: |
          curl -LsSf https://astral.sh/uv/install.sh | sh
          echo "$HOME/.cargo/bin" >> $GITHUB_PATH
        shell: bash
      - name: Run tests in expression-measurement/streaming/python-streaming-example
        working-directory: expression-measurement/streaming/python-streaming-example
        env:
          HUME_API_KEY: ${{ secrets.TEST_HUME_API_KEY_EXP_MEASUREMENT }}
        run: uv sync --extra dev && uv run pytest test_main.py -v

  test-evi-python-quickstart:
    needs: detect-changes
    if: github.event_name == 'workflow_dispatch' || github.event_name == 'schedule' || needs.detect-changes.outputs.evi_py_quickstart == 'true'
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v6
      - name: Set up python
        uses: actions/setup-python@v6
        with:
          python-version: '3.11'
      - name: Bootstrap poetry
        run: |
          curl -sSL https://install.python-poetry.org | python - -y --version 1.8.5
          echo "$HOME/.local/bin" >> $GITHUB_PATH
      - name: Install system dependencies for audio
        run: |
          sudo apt-get --yes update
          sudo apt-get --yes install libportaudio2
      - name: Install uv
        run: |
          curl -LsSf https://astral.sh/uv/install.sh | sh
          echo "$HOME/.cargo/bin" >> $GITHUB_PATH
        shell: bash
      - name: Run tests in evi-python-quickstart
        working-directory: evi/evi-python-quickstart
        run: uv run pytest test_quickstart.py -v
        env:
          TEST_HUME_API_KEY: ${{ secrets.TEST_HUME_API_KEY }}

  test-tts-python-quickstart:
    needs: detect-changes
    if: github.event_name == 'workflow_dispatch' || github.event_name == 'schedule' || needs.detect-changes.outputs.tts_py_quickstart == 'true'
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v6
      - name: Set up python
        uses: actions/setup-python@v6
        with:
          python-version: '3.11'
      - name: Bootstrap poetry
        run: |
          curl -sSL https://install.python-poetry.org | python - -y --version 1.8.5
          echo "$HOME/.local/bin" >> $GITHUB_PATH
      - name: Install system dependencies for audio
        run: |
          sudo apt-get --yes update
          sudo apt-get --yes install libportaudio2
      - name: Install uv
        run: |
          curl -LsSf https://astral.sh/uv/install.sh | sh
          echo "$HOME/.cargo/bin" >> $GITHUB_PATH
        shell: bash
      - name: Run tests in tts-python-quickstart
        working-directory: tts/tts-python-quickstart
        run: uv run pytest test_app.py -v
        env:
          TEST_HUME_API_KEY: ${{ secrets.TEST_HUME_API_KEY }}

  evi-python-chat-history:
    needs: detect-changes
    if: github.event_name == 'workflow_dispatch' || needs.detect-changes.outputs.evi_py_chat_history == 'true'
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v6
      - name: Set up Python
        uses: actions/setup-python@v6
        with:
          python-version: '3.11'
      - name: Install Poetry
        run: |
          curl -sSL https://install.python-poetry.org | python - -y --version 1.8.5
          echo "$HOME/.local/bin" >> $GITHUB_PATH
      - name: Install dependencies
        working-directory: evi/evi-python-chat-history
        run: poetry install --no-interaction --no-root
      - name: Verify
        working-directory: evi/evi-python-chat-history
        run: poetry run python -c "print('OK')"

  evi-python-clm-sse:
    needs: detect-changes
    if: github.event_name == 'workflow_dispatch' || needs.detect-changes.outputs.evi_py_clm_sse == 'true'
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v6
      - name: Set up Python
        uses: actions/setup-python@v6
        with:
          python-version: '3.11'
      - name: Install uv
        run: |
          curl -LsSf https://astral.sh/uv/install.sh | sh
          echo "$HOME/.cargo/bin" >> $GITHUB_PATH
        shell: bash
      - name: Install dependencies
        working-directory: evi/evi-python-clm-sse
        run: uv sync
      - name: Verify
        working-directory: evi/evi-python-clm-sse
        run: uv run python -c "print('OK')"

  evi-python-clm-wss:
    needs: detect-changes
    if: github.event_name == 'workflow_dispatch' || needs.detect-changes.outputs.evi_py_clm_wss == 'true'
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v6
      - name: Set up Python
        uses: actions/setup-python@v6
        with:
          python-version: '3.11'
      - name: Install uv
        run: |
          curl -LsSf https://astral.sh/uv/install.sh | sh
          echo "$HOME/.cargo/bin" >> $GITHUB_PATH
        shell: bash
      - name: Install dependencies
        working-directory: evi/evi-python-clm-wss
        run: uv sync
      - name: Verify
        working-directory: evi/evi-python-clm-wss
        run: uv run python -c "print('OK')"

  evi-python-control-plane:
    needs: detect-changes
    if: github.event_name == 'workflow_dispatch' || needs.detect-changes.outputs.evi_py_control_plane == 'true'
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v6
      - name: Set up Python
        uses: actions/setup-python@v6
        with:
          python-version: '3.11'
      - name: Install system dependencies for audio
        run: |
          sudo apt-get --yes update
          sudo apt-get --yes install libportaudio2
      - name: Install uv
        run: |
          curl -LsSf https://astral.sh/uv/install.sh | sh
          echo "$HOME/.cargo/bin" >> $GITHUB_PATH
        shell: bash
      - name: Install dependencies
        working-directory: evi/evi-python-control-plane
        run: uv sync
      - name: Verify
        working-directory: evi/evi-python-control-plane
        run: uv run python -c "print('OK')"

  evi-python-phone-calling-proxy-server:
    needs: detect-changes
    if: github.event_name == 'workflow_dispatch' || needs.detect-changes.outputs.evi_py_phone_calling == 'true'
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v6
      - name: Set up Python
        uses: actions/setup-python@v6
        with:
          python-version: '3.11'
      - name: Install uv
        run: |
          curl -LsSf https://astral.sh/uv/install.sh | sh
          echo "$HOME/.cargo/bin" >> $GITHUB_PATH
        shell: bash
      - name: Install dependencies
        working-directory: evi/evi-python-phone-calling-proxy-server
        run: uv sync
      - name: Verify
        working-directory: evi/evi-python-phone-calling-proxy-server
        run: uv run python -c "print('OK')"

  evi-python-webhooks:
    needs: detect-changes
    if: github.event_name == 'workflow_dispatch' || needs.detect-changes.outputs.evi_py_webhooks == 'true'
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v6
      - name: Set up Python
        uses: actions/setup-python@v6
        with:
          python-version: '3.11'
      - name: Install Poetry
        run: |
          curl -sSL https://install.python-poetry.org | python - -y --version 1.8.5
          echo "$HOME/.local/bin" >> $GITHUB_PATH
      - name: Install dependencies
        working-directory: evi/evi-python-webhooks
        run: poetry install --no-interaction --no-root
      - name: Verify
        working-directory: evi/evi-python-webhooks
        run: poetry run python -c "print('OK')"

  evi-python-wss-clm-endpoint:
    needs: detect-changes
    if: github.event_name == 'workflow_dispatch' || needs.detect-changes.outputs.evi_py_wss_clm_endpoint == 'true'
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v6
      - name: Set up Python
        uses: actions/setup-python@v6
        with:
          python-version: '3.11'
      - name: Install Poetry
        run: |
          curl -sSL https://install.python-poetry.org | python - -y --version 1.8.5
          echo "$HOME/.local/bin" >> $GITHUB_PATH
      - name: Install dependencies
        working-directory: evi/evi-python-wss-clm-endpoint
        run: poetry install --no-interaction --no-root
      - name: Verify
        working-directory: evi/evi-python-wss-clm-endpoint
        run: poetry run python -c "print('OK')"

  tts-python-livekit:
    needs: detect-changes
    if: github.event_name == 'workflow_dispatch' || needs.detect-changes.outputs.tts_py_livekit == 'true'
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v6
      - name: Set up Python
        uses: actions/setup-python@v6
        with:
          python-version: '3.11'
      - name: Install system dependencies for audio
        run: |
          sudo apt-get --yes update
          sudo apt-get --yes install libportaudio2 libasound2-dev
      - name: Install uv
        run: |
          curl -LsSf https://astral.sh/uv/install.sh | sh
          echo "$HOME/.cargo/bin" >> $GITHUB_PATH
        shell: bash
      - name: Install dependencies
        working-directory: tts/tts-python-livekit
        run: uv sync
      - name: Verify
        working-directory: tts/tts-python-livekit
        run: uv run python -c "print('OK')"

  evi-dotnet-quickstart:
    needs: detect-changes
    if: github.event_name == 'workflow_dispatch' || github.event_name == 'schedule' || needs.detect-changes.outputs.evi_dotnet_quickstart == 'true'
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v6
      - name: Setup .NET
        uses: actions/setup-dotnet@v5
        with:
          dotnet-version: '9.0.x'
      - name: Restore
        working-directory: evi/evi-dotnet-quickstart
        run: dotnet restore evi-csharp-quickstart.tests.csproj
      - name: Build
        working-directory: evi/evi-dotnet-quickstart
        run: dotnet build evi-csharp-quickstart.tests.csproj --no-restore -c Release
      - name: Run tests
        working-directory: evi/evi-dotnet-quickstart
        env:
          TEST_HUME_API_KEY: ${{ secrets.TEST_HUME_API_KEY }}
          HUME_API_KEY: ${{ secrets.TEST_HUME_API_KEY }}
        run: dotnet test evi-csharp-quickstart.tests.csproj --no-build -c Release -v normal

  tts-dotnet-quickstart:
    needs: detect-changes
    if: github.event_name == 'workflow_dispatch' || github.event_name == 'schedule' || needs.detect-changes.outputs.tts_dotnet_quickstart == 'true'
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v6
      - name: Setup .NET
        uses: actions/setup-dotnet@v5
        with:
          dotnet-version: '9.0.x'
      - name: Restore
        working-directory: tts/tts-dotnet-quickstart
        run: dotnet restore tts-csharp-quickstart.tests.csproj
      - name: Build
        working-directory: tts/tts-dotnet-quickstart
        run: dotnet build tts-csharp-quickstart.tests.csproj --no-restore -c Release
      - name: Run tests
        working-directory: tts/tts-dotnet-quickstart
        env:
          TEST_HUME_API_KEY: ${{ secrets.TEST_HUME_API_KEY }}
          HUME_API_KEY: ${{ secrets.TEST_HUME_API_KEY }}
        run: dotnet test tts-csharp-quickstart.tests.csproj --no-build -c Release -v normal

  evi-next-js-app-router-quickstart:
    needs: detect-changes
    if: github.event_name == 'workflow_dispatch' || github.event_name == 'schedule' || needs.detect-changes.outputs.evi_app_router == 'true'
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v6
      - name: Setup pnpm
        uses: pnpm/action-setup@v6
        with:
          version: 10.20.0
      - name: Setup Node
        uses: actions/setup-node@v6
        with:
          node-version: 20
          cache: 'pnpm'
          cache-dependency-path: evi/evi-next-js-app-router-quickstart/pnpm-lock.yaml
      - name: Install dependencies
        working-directory: evi/evi-next-js-app-router-quickstart
        run: pnpm install --frozen-lockfile
      - name: Build
        working-directory: evi/evi-next-js-app-router-quickstart
        run: pnpm run build
      - name: Install Playwright browsers
        working-directory: evi/evi-next-js-app-router-quickstart
        run: pnpm exec playwright install --with-deps chromium
      - name: Run tests
        working-directory: evi/evi-next-js-app-router-quickstart
        env:
          TEST_HUME_API_KEY: ${{ secrets.TEST_HUME_API_KEY }}
          TEST_HUME_SECRET_KEY: ${{ secrets.TEST_HUME_SECRET_KEY }}
          HUME_API_KEY: ${{ secrets.TEST_HUME_API_KEY }}
          HUME_SECRET_KEY: ${{ secrets.TEST_HUME_SECRET_KEY }}
        run: pnpm run test

  # Notifies Slack when the daily (cron) test run finishes. Add repo secret SLACK_WEBHOOK_URL (Incoming Webhook URL).
  notify-slack-cron:
    needs:
      - test-evi-typescript-quickstart
      - test-tts-typescript-quickstart
      - test-exp-meas-typescript-raw-text-processor
      - test-exp-meas-python-streaming
      - evi-next-js-app-router-quickstart
      - test-evi-python-quickstart
      - test-tts-python-quickstart
      - evi-dotnet-quickstart
      - tts-dotnet-quickstart
    # always() ensures we run even when one of the test jobs failed, so we can post the result to Slack
    if: always() && (github.event_name == 'schedule' || github.event_name == 'workflow_dispatch')
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v6
      - name: Notify Slack
        env:
          SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}
          EVI_TS: ${{ needs.test-evi-typescript-quickstart.result }}
          TTS_TS: ${{ needs.test-tts-typescript-quickstart.result }}
          EXP_MEAS_TS_RAW_TEXT_PROCESSOR: ${{ needs.test-exp-meas-typescript-raw-text-processor.result }}
          EVI_PY: ${{ needs.test-evi-python-quickstart.result }}
          TTS_PY: ${{ needs.test-tts-python-quickstart.result }}
          EXP_MEAS_PY_STREAMING: ${{ needs.test-exp-meas-python-streaming.result }}
          EVI_DOTNET: ${{ needs.evi-dotnet-quickstart.result }}
          TTS_DOTNET: ${{ needs.tts-dotnet-quickstart.result }}
          EVI_APP: ${{ needs.evi-next-js-app-router-quickstart.result }}
        run: |
          if [ -z "$SLACK_WEBHOOK_URL" ]; then
            echo "::warning::SLACK_WEBHOOK_URL secret is not set. Add it in repo Settings → Secrets and variables → Actions to get Slack notifications."
            echo "## Slack notification skipped" >> $GITHUB_STEP_SUMMARY
            echo "Add the **SLACK_WEBHOOK_URL** repository secret (Settings → Secrets and variables → Actions) to enable Slack notifications." >> $GITHUB_STEP_SUMMARY
            exit 0
          fi

          status() { [ "$1" = "success" ] && echo "✅ $1" || echo "❌ $1"; }

          # SDK versions from repo manifests (package name + version spec)
          EVI_TS_VER=$(jq -r '.dependencies.hume' evi/evi-typescript-quickstart/package.json 2>/dev/null || echo "?")
          TTS_TS_VER=$(jq -r '.dependencies.hume' tts/tts-typescript-quickstart/package.json 2>/dev/null || echo "?")
          EXP_MEAS_TS_VER=$(jq -r '.dependencies.hume' expression-measurement/batch/typescript-raw-text-processor/package.json 2>/dev/null || echo "?")
          EVI_APP_VER=$(jq -r '.dependencies["@humeai/voice-react"]' evi/evi-next-js-app-router-quickstart/package.json 2>/dev/null || echo "?")
          EVI_PY_VER=$(python3 -c "import tomllib; d=tomllib.load(open('evi/evi-python-quickstart/pyproject.toml','rb')); deps=d.get('project',{}).get('dependencies',[]); print(next((x for x in deps if 'hume' in x), ''))" 2>/dev/null || echo "?")
          TTS_PY_VER=$(python3 -c "import tomllib; d=tomllib.load(open('tts/tts-python-quickstart/pyproject.toml','rb')); deps=d.get('project',{}).get('dependencies',[]); print(next((x for x in deps if 'hume' in x), ''))" 2>/dev/null || echo "?")
          EXP_MEAS_PY_VER=$(python3 -c "import tomllib; d=tomllib.load(open('expression-measurement/streaming/python-streaming-example/pyproject.toml','rb')); deps=d.get('project',{}).get('dependencies',[]); print(next((x for x in deps if 'hume' in x), ''))" 2>/dev/null || echo "?")
          EVI_DOTNET_VER=$(sed -n 's/.*PackageVersion Include="Hume" Version="\([^"]*\)".*/\1/p' Directory.Packages.props 2>/dev/null | head -1 || echo "?")
          TTS_DOTNET_VER=$(sed -n 's/.*PackageVersion Include="Hume" Version="\([^"]*\)".*/\1/p' Directory.Packages.props 2>/dev/null | head -1 || echo "?")

          text="*Daily test-examples cron job finished*
            • TS SDK (hume $EVI_TS_VER): evi-typescript-quickstart: $(status "$EVI_TS")
            • TS SDK (hume $TTS_TS_VER): tts-typescript-quickstart: $(status "$TTS_TS")
            • TS SDK (hume $EXP_MEAS_TS_VER): exp-meas-ts-raw-text-processor (batch): $(status "$EXP_MEAS_TS_RAW_TEXT_PROCESSOR")
            • React SDK (@humeai/voice-react $EVI_APP_VER): evi-next-js-app-router-quickstart: $(status "$EVI_APP")
            • Py SDK (hume $EVI_PY_VER): evi-python-quickstart: $(status "$EVI_PY")
            • Py SDK (hume $TTS_PY_VER): tts-python-quickstart: $(status "$TTS_PY")
            • Py SDK (hume $EXP_MEAS_PY_VER): exp-meas-python-streaming: $(status "$EXP_MEAS_PY_STREAMING")
            • .NET SDK (Hume $EVI_DOTNET_VER): evi-dotnet-quickstart: $(status "$EVI_DOTNET")
            • .NET SDK (Hume $TTS_DOTNET_VER): tts-dotnet-quickstart: $(status "$TTS_DOTNET")
            <${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}|View run>"

          payload=$(jq -n --arg text "$(echo "$text" | sed 's/^[[:space:]]*//')" '{text: $text}')
          res=$(curl -sS -w "%{http_code}" -o /tmp/slack_resp -X POST -H "Content-Type: application/json" --data "$payload" "$SLACK_WEBHOOK_URL") || true
          if [ "$res" = "200" ]; then
            echo "Slack notification sent successfully."
            echo "## Slack notification sent" >> $GITHUB_STEP_SUMMARY
          else
            echo "::warning::Slack webhook returned HTTP $res. Check the webhook URL and channel."
            echo "## Slack notification failed (HTTP $res)" >> $GITHUB_STEP_SUMMARY
          fi

  evi-next-js-function-calling:
    needs: detect-changes
    if: github.event_name == 'workflow_dispatch' || needs.detect-changes.outputs.evi_function_calling == 'true'
    runs-on: ubuntu-latest
    env:
      HUME_API_KEY: ${{ secrets.TEST_HUME_API_KEY }}
      HUME_SECRET_KEY: ${{ secrets.TEST_HUME_SECRET_KEY }}
    steps:
      - name: Checkout
        uses: actions/checkout@v6
      - name: Setup pnpm
        uses: pnpm/action-setup@v6
        with:
          version: 10.20.0
      - name: Setup Node
        uses: actions/setup-node@v6
        with:
          node-version: 20
          cache: 'pnpm'
          cache-dependency-path: evi/evi-next-js-function-calling/pnpm-lock.yaml
      - name: Install dependencies
        working-directory: evi/evi-next-js-function-calling
        run: pnpm install --frozen-lockfile
      - name: Build
        working-directory: evi/evi-next-js-function-calling
        run: pnpm run build

  evi-next-js-pages-router-quickstart:
    needs: detect-changes
    if: github.event_name == 'workflow_dispatch' || needs.detect-changes.outputs.evi_pages_router == 'true'
    runs-on: ubuntu-latest
    env:
      HUME_API_KEY: ${{ secrets.TEST_HUME_API_KEY }}
      HUME_SECRET_KEY: ${{ secrets.TEST_HUME_SECRET_KEY }}
    steps:
      - name: Checkout
        uses: actions/checkout@v6
      - name: Setup pnpm
        uses: pnpm/action-setup@v6
        with:
          version: 10.20.0
      - name: Setup Node
        uses: actions/setup-node@v6
        with:
          node-version: 20
          cache: 'pnpm'
          cache-dependency-path: evi/evi-next-js-pages-router-quickstart/pnpm-lock.yaml
      - name: Install dependencies
        working-directory: evi/evi-next-js-pages-router-quickstart
        run: pnpm install --frozen-lockfile
      - name: Build
        working-directory: evi/evi-next-js-pages-router-quickstart
        run: pnpm run build

  evi-react-native:
    needs: detect-changes
    if: github.event_name == 'workflow_dispatch' || needs.detect-changes.outputs.evi_react_native == 'true'
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v6
      - name: Setup pnpm
        uses: pnpm/action-setup@v6
        with:
          version: 10.20.0
      - name: Setup Node
        uses: actions/setup-node@v6
        with:
          node-version: 20
          cache: 'pnpm'
          cache-dependency-path: evi/evi-react-native/pnpm-lock.yaml
      - name: Install dependencies
        working-directory: evi/evi-react-native
        run: pnpm install --frozen-lockfile
      - name: Lint
        working-directory: evi/evi-react-native
        run: pnpm run lint

  evi-typescript-chat-history:
    needs: detect-changes
    if: github.event_name == 'workflow_dispatch' || needs.detect-changes.outputs.evi_ts_chat_history == 'true'
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v6
      - name: Setup pnpm
        uses: pnpm/action-setup@v6
        with:
          version: 10.20.0
      - name: Setup Node
        uses: actions/setup-node@v6
        with:
          node-version: 20
          cache: 'pnpm'
          cache-dependency-path: evi/evi-typescript-chat-history/pnpm-lock.yaml
      - name: Install dependencies
        working-directory: evi/evi-typescript-chat-history
        run: pnpm install --frozen-lockfile
      - name: Build
        working-directory: evi/evi-typescript-chat-history
        run: pnpm run build

  evi-typescript-function-calling:
    needs: detect-changes
    if: github.event_name == 'workflow_dispatch' || needs.detect-changes.outputs.evi_ts_function_calling == 'true'
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v6
      - name: Setup pnpm
        uses: pnpm/action-setup@v6
        with:
          version: 10.20.0
      - name: Setup Node
        uses: actions/setup-node@v6
        with:
          node-version: 20
      - name: Install dependencies
        working-directory: evi/evi-typescript-function-calling
        run: pnpm install --frozen-lockfile
      - name: Build
        working-directory: evi/evi-typescript-function-calling
        run: pnpm run build

  evi-typescript-webhooks:
    needs: detect-changes
    if: github.event_name == 'workflow_dispatch' || needs.detect-changes.outputs.evi_ts_webhooks == 'true'
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v6
      - name: Setup pnpm
        uses: pnpm/action-setup@v6
        with:
          version: 10.20.0
      - name: Setup Node
        uses: actions/setup-node@v6
        with:
          node-version: 20
      - name: Install dependencies
        working-directory: evi/evi-typescript-webhooks
        run: pnpm install --frozen-lockfile

  evi-vue-widget:
    needs: detect-changes
    if: github.event_name == 'workflow_dispatch' || needs.detect-changes.outputs.evi_vue_widget == 'true'
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v6
      - name: Setup pnpm
        uses: pnpm/action-setup@v6
        with:
          version: 10.20.0
      - name: Setup Node
        uses: actions/setup-node@v6
        with:
          node-version: 20
      - name: Install dependencies
        working-directory: evi/evi-vue-widget
        run: pnpm install --frozen-lockfile
      - name: Build
        working-directory: evi/evi-vue-widget
        run: pnpm run build

  tts-next-js-agora:
    needs: detect-changes
    if: github.event_name == 'workflow_dispatch' || needs.detect-changes.outputs.tts_next_agora == 'true'
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v6
      - name: Setup pnpm
        uses: pnpm/action-setup@v6
        with:
          version: 10.20.0
      - name: Setup Node
        uses: actions/setup-node@v6
        with:
          node-version: 20
          cache: 'pnpm'
          cache-dependency-path: tts/tts-next-js-agora/pnpm-lock.yaml
      - name: Install dependencies
        working-directory: tts/tts-next-js-agora
        run: pnpm install --frozen-lockfile
      - name: Build
        working-directory: tts/tts-next-js-agora
        run: pnpm run build

  tts-next-js-vercel-ai-sdk:
    needs: detect-changes
    if: github.event_name == 'workflow_dispatch' || needs.detect-changes.outputs.tts_next_vercel_ai_sdk == 'true'
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v6
      - name: Setup pnpm
        uses: pnpm/action-setup@v6
        with:
          version: 10.20.0
      - name: Setup Node
        uses: actions/setup-node@v6
        with:
          node-version: 20
          cache: 'pnpm'
          cache-dependency-path: tts/tts-next-js-vercel-ai-sdk/pnpm-lock.yaml
      - name: Install dependencies
        working-directory: tts/tts-next-js-vercel-ai-sdk
        run: pnpm install --frozen-lockfile
      - name: Build
        working-directory: tts/tts-next-js-vercel-ai-sdk
        run: pnpm run build

  tts-typescript-lipsync:
    needs: detect-changes
    if: github.event_name == 'workflow_dispatch' || needs.detect-changes.outputs.tts_ts_lipsync == 'true'
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v6
      - name: Setup pnpm
        uses: pnpm/action-setup@v6
        with:
          version: 10.20.0
      - name: Setup Node
        uses: actions/setup-node@v6
        with:
          node-version: 20
      - name: Install dependencies
        working-directory: tts/tts-typescript-lipsync
        run: pnpm install --frozen-lockfile
      - name: Build
        working-directory: tts/tts-typescript-lipsync
        run: pnpm run build
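The notify-slack-cron step above scrapes SDK version specs out of each manifest with `jq`, `python3 -c` tomllib one-liners, and `sed`. For illustration, the same lookup in TypeScript, as a rough sketch (hypothetical helper, not a file in this repo):

```ts
// Sketch: version lookups equivalent to the shell one-liners in notify-slack-cron.
import { readFileSync } from "node:fs";

// package.json: version spec of a dependency, e.g. "hume" or "@humeai/voice-react".
function specFromPackageJson(path: string, dep = "hume"): string {
  const pkg = JSON.parse(readFileSync(path, "utf8"));
  return pkg.dependencies?.[dep] ?? "?";
}

// pyproject.toml: first [project].dependencies entry mentioning "hume".
// A crude line scan stands in for a real TOML parser here.
function humeSpecFromPyproject(path: string): string {
  const line = readFileSync(path, "utf8")
    .split("\n")
    .find((l) => l.includes('"hume'));
  return line?.trim().replace(/[",]/g, "") ?? "?";
}

console.log(specFromPackageJson("evi/evi-typescript-quickstart/package.json"));
console.log(
  specFromPackageJson(
    "evi/evi-next-js-app-router-quickstart/package.json",
    "@humeai/voice-react",
  ),
);
console.log(humeSpecFromPyproject("evi/evi-python-quickstart/pyproject.toml"));
```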
================================================
FILE: .gitignore
================================================
.hume/
__pycache__/
.venv/
.DS_Store
.env
node_modules/
.pnpm-store/
dist/
.vscode/
.mypy_cache/

================================================
FILE: Directory.Packages.props
================================================
true

================================================
FILE: LICENSE
================================================
MIT License

Copyright (c) 2023 Hume AI

Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

================================================
FILE: README.md
================================================

# Hume API Examples

Browse sample code and projects designed to help you integrate Hume APIs.

📘 Documentation | 💬 Join us on Discord | 🔐 Getting your API Keys

## Overview

Welcome to the official Hume API Examples repository! Here you'll find open-source example projects and quickstart guides to help you integrate the [Hume API](https://docs.hume.ai) across a variety of languages and frameworks.

Use these examples to:

- Add empathic Text-to-Speech (TTS) to your application
- Build rich conversational agents with the Empathic Voice Interface (EVI)
- Measure expressions with facial, vocal, and language-based analysis

Whether you're using Python, TypeScript, Swift, C#, Flutter, Unity, or Next.js, there's something here to help you get started quickly.

## [Text-to-Speech (TTS)](https://dev.hume.ai/docs/text-to-speech-tts/overview)

| Name | Language | Framework |
| ---- | -------- | --------- |
| [`tts-dotnet-quickstart`](/tts/tts-dotnet-quickstart/README.md) | C# | .NET |
| [`tts-next-js-agora`](/tts/tts-next-js-agora/README.md) | TypeScript | Next.js |
| [`tts-next-js-chat`](/tts/tts-next-js-chat/README.md) | TypeScript | Next.js |
| [`tts-next-js-vercel-ai-sdk`](/tts/tts-next-js-vercel-ai-sdk/README.md) | TypeScript | Next.js |
| [`tts-python-livekit`](/tts/tts-python-livekit/README.md) | Python | LiveKit |
| [`tts-python-quickstart`](/tts/tts-python-quickstart/README.md) | Python | |
| [`tts-swift-quickstart`](/tts/tts-swift-quickstart/README.md) | Swift | iOS |
| [`tts-typescript-lipsync`](/tts/tts-typescript-lipsync/README.md) | TypeScript | |
| [`tts-typescript-quickstart`](/tts/tts-typescript-quickstart/README.md) | TypeScript | |
| [`tts-unity-quickstart`](/tts/tts-unity-quickstart/README.md) | C# | Unity |

## [Empathic Voice Interface (EVI)](https://dev.hume.ai/docs/empathic-voice-interface-evi/overview)

| Name | Language | Framework |
| ---- | -------- | --------- |
| [`evi-dotnet-quickstart`](/evi/evi-dotnet-quickstart/README.md) | C# | .NET |
| [`evi-flutter`](/evi/evi-flutter/README.md) | Dart | Flutter |
| [`evi-next-js-app-router-quickstart`](/evi/evi-next-js-app-router-quickstart/README.md) | TypeScript | Next.js |
| [`evi-next-js-function-calling`](/evi/evi-next-js-function-calling/README.md) | TypeScript | Next.js |
| [`evi-next-js-pages-router-quickstart`](/evi/evi-next-js-pages-router-quickstart/README.md) | TypeScript | Next.js |
| [`evi-prompting-examples`](/evi/evi-prompting-examples/README.md) | | |
| [`evi-python-chat-history`](/evi/evi-python-chat-history/README.md) | Python | |
| [`evi-python-clm-sse`](/evi/evi-python-clm-sse/README.md) | Python | |
| [`evi-python-clm-wss`](/evi/evi-python-clm-wss/README.md) | Python | |
| [`evi-python-control-plane`](/evi/evi-python-control-plane/README.md) | Python | |
| [`evi-python-function-calling`](/evi/evi-python-function-calling/README.md) | Python | |
| [`evi-python-phone-calling-proxy-server`](/evi/evi-python-phone-calling-proxy-server/README.md) | Python | Flask |
| [`evi-python-quickstart`](/evi/evi-python-quickstart/README.md) | Python | |
| [`evi-python-raw-api`](/evi/evi-python-raw-api/README.md) | Python | |
| [`evi-python-webhooks`](/evi/evi-python-webhooks/README.md) | Python | FastAPI |
| [`evi-python-wss-clm-endpoint`](/evi/evi-python-wss-clm-endpoint/) | Python | Modal |
| [`evi-react-native`](/evi/evi-react-native/README.md) | TypeScript | React Native |
| [`evi-swift-chat`](/evi/evi-swift-chat/README.md) | Swift | iOS |
| [`evi-touchdesigner`](/evi/evi-touchdesigner/README.md) | Python | TouchDesigner |
| [`evi-typescript-chat-history`](/evi/evi-typescript-chat-history/README.md) | TypeScript | |
| [`evi-typescript-function-calling`](/evi/evi-typescript-function-calling/README.md) | TypeScript | Vite |
| [`evi-typescript-proxy`](/evi/evi-typescript-proxy/README.md) | TypeScript | Node.js |
| [`evi-typescript-quickstart`](/evi/evi-typescript-quickstart/README.md) | TypeScript | |
| [`evi-typescript-webhooks`](/evi/evi-typescript-webhooks/README.md) | TypeScript | Express |
| [`evi-unity-quickstart`](/evi/evi-unity-quickstart/README.md) | C# | Unity |
| [`evi-vue-widget`](/evi/evi-vue-widget/README.md) | TypeScript | Vue |

## [Expression Measurement API](https://dev.hume.ai/docs/expression-measurement-api/overview)

| Name | Models | Language | Framework |
| ---- | ------ | -------- | --------- |
| [`visualization-example`](/expression-measurement/visualization-example/example-notebook.ipynb) | `face` | Python | |
| [`python-top-emotions`](/expression-measurement/batch/python-top-emotions/README.md) | `face` | Python | |
| [`typescript-raw-text-processor`](/expression-measurement/batch/typescript-raw-text-processor/README.md) | `language` | TypeScript | |
| [`next-js-emotional-language`](/expression-measurement/batch/next-js-emotional-language/README.md) | `language` | TypeScript | Next.js |
| [`next-js-streaming-example`](/expression-measurement/streaming/next-js-streaming-example/README.md) | `language`, `face`, `burst`, `speech` | TypeScript | Next.js |

## Authentication & Setup

You must authenticate to use the Hume API. Your API key can be retrieved from the [Hume AI platform](https://app.hume.ai/keys). For detailed instructions, see our documentation on [getting your API keys](https://dev.hume.ai/docs/introduction/api-key).

Each example project includes a `README.md` file with step-by-step instructions on:

- Setting your API key (usually via environment variables)
- Installing dependencies
- Running the example

## License

All projects are licensed under the MIT License - see the [LICENSE](/LICENSE) file for details.
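As the Authentication & Setup section notes, every example reads its key from the environment. A minimal TypeScript sketch of that pattern with the `hume` SDK (method and field names follow the current TypeScript SDK; treat this as a sketch rather than a drop-in file from this repo):

```ts
// Sketch: construct an authenticated Hume client from an environment variable.
// Assumes `npm install hume` and that HUME_API_KEY is set, e.g. via a .env file.
import { HumeClient } from "hume";

const apiKey = process.env.HUME_API_KEY;
if (!apiKey) {
  throw new Error("HUME_API_KEY is not set; get a key at https://app.hume.ai/keys");
}

const hume = new HumeClient({ apiKey });

// Smoke-test the credentials with a tiny TTS request (audio returned as JSON).
const result = await hume.tts.synthesizeJson({
  utterances: [{ text: "Hello from the Hume API examples repo." }],
});
console.log(`received ${result.generations.length} generation(s)`);
```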
## License

All projects are licensed under the MIT License - see the [LICENSE](/LICENSE) file for details.

================================================
FILE: evi/evi-dotnet-quickstart/.gitignore
================================================
# Build outputs
[Bb]in/
[Oo]bj/

# IDE
.vs/
.idea/
*.user
*.suo

# macOS
.DS_Store

# Environment
.env

# Allow sample audio
!sample_input.pcm

================================================
FILE: evi/evi-dotnet-quickstart/EviTests.cs
================================================
// To run tests:
//   dotnet test evi-csharp-quickstart.tests.csproj --logger "console;verbosity=detailed"

using System;
using System.Collections.Generic;
using System.Linq;
using System.Text.Json;
using System.Threading.Tasks;
using DotNetEnv;
using Hume;
using Hume.EmpathicVoice;
using OneOf;
using Xunit;
using Xunit.Abstractions;

namespace EviCsharpQuickstart.Tests;

public class EviTestFixture : IAsyncLifetime
{
    public string ApiKey { get; private set; } = string.Empty;
    public HumeClient? HumeClient { get; private set; }

    public Task InitializeAsync()
    {
        // Tests run from bin/Debug/net9.0/, so .env is 3 levels up
        Env.Load("../../../.env");
        var apiKey = Environment.GetEnvironmentVariable("TEST_HUME_API_KEY")
            ?? Environment.GetEnvironmentVariable("HUME_API_KEY");
        if (string.IsNullOrEmpty(apiKey))
        {
            throw new InvalidOperationException(
                "API key is required. Set TEST_HUME_API_KEY (CI) or HUME_API_KEY.");
        }
        ApiKey = apiKey;
        HumeClient = new HumeClient(ApiKey);
        return Task.CompletedTask;
    }

    public Task DisposeAsync()
    {
        return Task.CompletedTask;
    }
}

[Collection("EviTests")]
public class EviConnectionTests : IClassFixture<EviTestFixture>
{
    private readonly EviTestFixture _fixture;
    private readonly ITestOutputHelper _output;

    public EviConnectionTests(EviTestFixture fixture, ITestOutputHelper output)
    {
        _fixture = fixture;
        _output = output;
    }

    [Fact(DisplayName = "test fixture has API key")]
    public void TestFixture_HasApiKey()
    {
        Assert.False(string.IsNullOrEmpty(_fixture.ApiKey), "API key loaded");
        Assert.NotNull(_fixture.HumeClient);
    }

    [Fact(DisplayName = "connects w/ API key, starts a chat, receives a chatId, stays alive for 2 seconds")]
    public async Task Connects_StartsChat_ReceivesChatId_StaysAlive()
    {
        string? chatId = null;
        var chatApi = _fixture.HumeClient!.EmpathicVoice.CreateChatApi(new ChatApi.Options
        {
            ApiKey = _fixture.ApiKey,
            SessionSettings = new ConnectSessionSettings(),
        });
        chatApi.ChatMetadata.Subscribe(metadata => { chatId = metadata.ChatId; });
        await chatApi.ConnectAsync();
        // Poll for up to ~10 seconds (100 x 100 ms) for the chat_metadata message
        for (int i = 0; i < 100; i++)
        {
            if (chatId != null) { break; }
            await Task.Delay(100);
        }
        Assert.NotNull(chatId);
        Assert.False(string.IsNullOrEmpty(chatId), "Expected chat_id from chat_metadata");
        await Task.Delay(2000);
        await chatApi.DisposeAsync();
    }

    [Fact(DisplayName = "connects w/ API key, verifies sessionSettings are passed on connect()")]
    public async Task Connects_VerifiesSessionSettingsOnConnect()
    {
        var sessionSettings = new ConnectSessionSettings
        {
            SystemPrompt = "You are a helpful assistant that verifies sessionSettings are passed on connect()",
            Variables = new Dictionary<string, OneOf<string, double, bool>>
            {
                { "userName", OneOf<string, double, bool>.FromT0("John") },
                { "userAge", OneOf<string, double, bool>.FromT1(30.0) },
                { "isPremium", OneOf<string, double, bool>.FromT2(true) }
            }
        };
        string? chatId = null;
        var chatApi = _fixture.HumeClient!.EmpathicVoice.CreateChatApi(new ChatApi.Options
        {
            ApiKey = _fixture.ApiKey,
            SessionSettings = sessionSettings,
        });
        chatApi.ChatMetadata.Subscribe(metadata => { chatId = metadata.ChatId; });
        await chatApi.ConnectAsync();
        for (int i = 0; i < 100; i++)
        {
            if (chatId != null) { break; }
            await Task.Delay(100);
        }
        Assert.NotNull(chatId);
        Assert.False(string.IsNullOrEmpty(chatId), "Expected chat_id from chat_metadata");
        await chatApi.DisposeAsync();
        await Task.Delay(2000);

        // Fetch chat events and verify session settings
        var events = new List<ReturnChatEvent>();
        var request = new ChatsListChatEventsRequest { PageNumber = 0, AscendingOrder = true };
        var pager = await _fixture.HumeClient!.EmpathicVoice.Chats.ListChatEventsAsync(chatId, request);
        await foreach (var evt in pager)
        {
            events.Add(evt);
        }
        var eventTypes = events.Select(e => e.Type.ToString()).ToList();
        var sessionSettingsEvent = events.FirstOrDefault(e => e.Type.ToString() == "SESSION_SETTINGS");
        if (sessionSettingsEvent == null)
        {
            var eventTypesStr = string.Join(", ", eventTypes);
            Assert.Fail(
                $"Expected SESSION_SETTINGS event but found none. Event types found: {eventTypesStr}. Total events: {events.Count}");
            return;
        }
        Assert.NotNull(sessionSettingsEvent.MessageText);
        var parsedSettings = JsonSerializer.Deserialize<JsonElement>(sessionSettingsEvent.MessageText!);
        Assert.Equal("session_settings", parsedSettings.GetProperty("type").GetString());
        Assert.Equal(
            "You are a helpful assistant that verifies sessionSettings are passed on connect()",
            parsedSettings.GetProperty("system_prompt").GetString());
        var variables = parsedSettings.GetProperty("variables");
        Assert.Equal("John", variables.GetProperty("userName").GetString());
        Assert.Equal("30", variables.GetProperty("userAge").GetString());
        Assert.Equal("true", variables.GetProperty("isPremium").GetString());
    }

    [Fact(DisplayName = "connects w/ API key, verifies sessionSettings can be updated after connect()")]
    public async Task Connects_VerifiesSessionSettingsUpdatedAfterConnect()
    {
        string? chatId = null;
        var chatApi = _fixture.HumeClient!.EmpathicVoice.CreateChatApi(new ChatApi.Options
        {
            ApiKey = _fixture.ApiKey,
            SessionSettings = new ConnectSessionSettings(),
        });
        chatApi.ChatMetadata.Subscribe(metadata => { chatId = metadata.ChatId; });
        await chatApi.ConnectAsync();
        for (int i = 0; i < 100; i++)
        {
            if (chatId != null) { break; }
            await Task.Delay(100);
        }
        Assert.NotNull(chatId);
        Assert.False(string.IsNullOrEmpty(chatId), "Expected chat_id from chat_metadata");

        var updatedSettings = new SessionSettings
        {
            SystemPrompt = "You are a helpful test assistant with updated system prompt"
        };
        await chatApi.Send(updatedSettings);
        await Task.Delay(1000);
        await chatApi.DisposeAsync();
        await Task.Delay(1000);

        var events = new List<ReturnChatEvent>();
        var request = new ChatsListChatEventsRequest { PageNumber = 0, AscendingOrder = true };
        var pager = await _fixture.HumeClient!.EmpathicVoice.Chats.ListChatEventsAsync(chatId, request);
        await foreach (var evt in pager)
        {
            events.Add(evt);
        }
        var sessionSettingsEvents = events.Where(e => (string)e.Type == "SESSION_SETTINGS").ToList();
        Assert.True(sessionSettingsEvents.Count >= 1,
            $"Expected at least 1 SESSION_SETTINGS event. Found event types: {string.Join(", ", events.Select(e => e.Type))}");
        var updatedEvent = sessionSettingsEvents.Last();
        Assert.NotNull(updatedEvent.MessageText);
        var parsedSettings = JsonSerializer.Deserialize<JsonElement>(updatedEvent.MessageText!);
        Assert.Equal("session_settings", parsedSettings.GetProperty("type").GetString());
        Assert.Equal("You are a helpful test assistant with updated system prompt",
            parsedSettings.GetProperty("system_prompt").GetString());
    }
}

[CollectionDefinition("EviTests")]
public class EviTestCollection : ICollectionFixture<EviTestFixture> { }

================================================
FILE: evi/evi-dotnet-quickstart/Program.cs
================================================
using System;
using System.IO;
using System.Linq;
using System.Threading.Tasks;
using DotNetEnv;
using Hume;
using Hume.EmpathicVoice;

Env.Load();
var apiKey = Environment.GetEnvironmentVariable("HUME_API_KEY")
    ?? throw new InvalidOperationException(
        "HUME_API_KEY environment variable is required. See README.md for setup instructions.");

var client = new HumeClient(apiKey);

// Create a signal to wait for Chat Metadata
var chatMetadataReceived = new TaskCompletionSource<bool>();

// Create the ChatApi instance
var chatApi = client.EmpathicVoice.CreateChatApi(new ChatApi.Options
{
    ApiKey = apiKey,
    SessionSettings = new ConnectSessionSettings(),
});

// Subscribe to events
chatApi.AssistantMessage.Subscribe(message =>
{
    Console.WriteLine($"Assistant: {message.Message?.Content}");
});
chatApi.UserMessage.Subscribe(message =>
{
    Console.WriteLine($"User: {message.Message?.Content}");
});
chatApi.AudioOutput.Subscribe(audio =>
{
    Console.WriteLine($"Received audio chunk: {audio.Data?.Length ?? 0} bytes");
});
chatApi.ChatMetadata.Subscribe(metadata =>
{
    Console.WriteLine($"Chat Metadata - Chat ID: {metadata.ChatId}");
    chatMetadataReceived.TrySetResult(true);
});

// Connect to EVI
Console.WriteLine("Connecting to EVI...");
await chatApi.ConnectAsync();
Console.WriteLine("Connected!");

// Wait for Chat Metadata
Console.WriteLine("Waiting for Chat Metadata...");
await chatMetadataReceived.Task;
Console.WriteLine("Chat Metadata received.");

// Configure audio format (48kHz, 16-bit, mono PCM)
const int sampleRate = 48000;
const int channels = 1;
var sessionSettings = new SessionSettings
{
    Audio = new AudioConfiguration { SampleRate = sampleRate, Channels = channels }
};
Console.WriteLine("Sending session settings:");
Console.WriteLine($"  Encoding: {sessionSettings.Audio?.Encoding}");
Console.WriteLine($"  Sample Rate: {sessionSettings.Audio?.SampleRate} Hz");
Console.WriteLine($"  Channels: {sessionSettings.Audio?.Channels}");
await chatApi.Send(sessionSettings);
Console.WriteLine("Session settings sent successfully.");

Console.WriteLine("Starting audio transmission...");
await TransmitTestAudio(chatApi, "sample_input.pcm", sampleRate, channels);

// Wait for responses
Console.WriteLine("Waiting for responses...");
await Task.Delay(5000);
await chatApi.DisposeAsync();
Console.WriteLine("Done");

/// <summary>
/// Reads a PCM file and streams its audio data to EVI in real-time chunks.
/// </summary>
static async Task TransmitTestAudio(IChatApi chatApi, string filePath, int sampleRate, int channels)
{
    const int chunkDurationMs = 10;
    const int bytesPerSample = 2; // 16-bit audio
    // At 48 kHz, 16-bit, mono, a 10 ms chunk is 48000 * 2 * 1 * 10 / 1000 = 960 bytes.
    int bytesPerChunk = sampleRate * bytesPerSample * channels * chunkDurationMs / 1000;

    // Step 1: Read PCM file
    var audioData = File.ReadAllBytes(filePath);
    Console.WriteLine($"Read {audioData.Length} bytes of audio from {filePath}");

    // Step 2: Split into chunks
    var chunks = SplitAudioIntoChunks(audioData, bytesPerChunk);

    // Step 3: Send chunks with delays
    await SendAudioChunksAsync(chatApi, chunks, chunkDurationMs);
}

static byte[][] SplitAudioIntoChunks(byte[] audioData, int bytesPerChunk)
{
    var chunks = new List<byte[]>();
    for (int offset = 0; offset < audioData.Length; offset += bytesPerChunk)
    {
        var chunkSize = Math.Min(bytesPerChunk, audioData.Length - offset);
        var chunk = audioData.Skip(offset).Take(chunkSize).ToArray();
        // Pad final chunk if needed
        if (chunk.Length < bytesPerChunk)
        {
            chunk = chunk.Concat(new byte[bytesPerChunk - chunk.Length]).ToArray();
        }
        chunks.Add(chunk);
    }
    Console.WriteLine($"Split audio into {chunks.Count} chunks");
    return chunks.ToArray();
}

static async Task SendAudioChunksAsync(IChatApi chatApi, byte[][] chunks, int chunkDurationMs)
{
    Console.WriteLine($"Sending {chunks.Length} audio chunks...");
    var lastLogTime = DateTime.Now;
    long bytesSent = 0;
    for (int i = 0; i < chunks.Length; i++)
    {
        var data = Convert.ToBase64String(chunks[i]);
        await chatApi.Send(new AudioInput { Data = data });
        bytesSent += chunks[i].Length;
        // Log progress every 5 seconds
        var now = DateTime.Now;
        if ((now - lastLogTime).TotalSeconds >= 5)
        {
            Console.WriteLine($"Sent {bytesSent} bytes ({i + 1}/{chunks.Length} chunks)");
            lastLogTime = now;
        }
        // Pace transmission to match real-time playback of each chunk
        await Task.Delay(chunkDurationMs);
    }
    Console.WriteLine("Finished sending audio.");
    Console.WriteLine($"Total bytes sent: {bytesSent}");
}

================================================
FILE: evi/evi-dotnet-quickstart/README.md
================================================

EVI | C# Quickstart

Jumpstart your development with Hume's Empathic Voice Interface!

## Overview

This project demonstrates how to use [Hume AI](https://hume.ai)'s [Empathic Voice Interface (EVI)](https://dev.hume.ai/docs/empathic-voice-interface-evi/overview) with C#. EVI is an emotionally intelligent voice AI that understands and responds to human emotions in real-time. It processes speech with emotional awareness, enabling more natural and empathetic conversations.

## Instructions

1. Clone this examples repository:

   ```shell
   git clone https://github.com/humeai/hume-api-examples
   cd hume-api-examples/evi/evi-dotnet-quickstart
   ```

2. Set up your API key:

   Your API key can be retrieved from the [Hume AI platform](https://app.hume.ai/keys). For detailed instructions, see our documentation on [getting your api keys](https://dev.hume.ai/docs/introduction/api-key).

   Create a `.env` file in this folder with your API key:

   ```
   HUME_API_KEY=your_api_key_here
   ```

   Or set it as an environment variable:

   **Windows (Command Prompt):**

   ```cmd
   set HUME_API_KEY=your_api_key_here
   ```

   **Windows (PowerShell):**

   ```powershell
   $env:HUME_API_KEY="your_api_key_here"
   ```

   **macOS/Linux:**

   ```bash
   export HUME_API_KEY=your_api_key_here
   ```

3. Install dependencies:

   ```shell
   dotnet restore
   ```

4. Run the project:

   ```shell
   dotnet run
   ```

## Features Demonstrated

This quickstart demonstrates key features of the EVI API:

- **WebSocket Connection**: Establishing a real-time connection to EVI
- **Audio Streaming**: Sending audio data in chunks for processing
- **Event Handling**: Subscribing to assistant messages, user transcriptions, and audio output
- **Session Management**: Configuring audio settings and managing chat metadata

## Requirements

- .NET 9.0 SDK or later (the project targets `net9.0`)
- A Hume API key

## Output

The application connects to EVI, streams your audio file, and displays:

- Transcribed user speech
- Assistant responses
- Audio output notifications

================================================
FILE: evi/evi-dotnet-quickstart/evi-csharp-quickstart.csproj
================================================
Exe net9.0 EviCsharpQuickstart enable enable latest

================================================
FILE: evi/evi-dotnet-quickstart/evi-csharp-quickstart.tests.csproj
================================================
net9.0 EviCsharpQuickstart.Tests enable enable latest false true runtime; build; native; contentfiles; analyzers; buildtransitive all runtime; build; native; contentfiles; analyzers; buildtransitive all

================================================
FILE: evi/evi-flutter/.gitignore
================================================
# Miscellaneous
*.class
*.log
*.pyc
*.swp
.DS_Store
.atom/
.build/
.buildlog/
.history
.svn/
.swiftpm/
migrate_working_dir/

# Environment variables related
.env

# IntelliJ related
*.iml
*.ipr
*.iws
.idea/

# The .vscode folder contains launch configuration and tasks you configure in
# VS Code which you may wish to be included in version control, so this line
# is commented out by default.
#.vscode/

# Flutter/Dart/Pub related
**/doc/api/
**/ios/Flutter/.last_build_id
.dart_tool/
.flutter-plugins
.flutter-plugins-dependencies
.pub-cache/
.pub/
/build/

# Symbolication related
app.*.symbols

# Obfuscation related
app.*.map.json

# Android Studio will place build artifacts here
/android/app/debug
/android/app/profile
/android/app/release

/pubspec.lock
ios/Podfile.lock

================================================
FILE: evi/evi-flutter/README.md
================================================

Empathic Voice Interface | Sample Flutter App

This project features a sample implementation of Hume's [Empathic Voice Interface](https://dev.hume.ai/docs/empathic-voice-interface-evi/overview) using Flutter. It is lightly adapted from the starter project provided by `flutter create`.

**Targets:** The example supports iOS, Android, and Web.

**Dependencies:** It uses the [record](https://pub.dev/packages/record) Flutter package for audio recording, and the [audioplayers](https://pub.dev/packages/audioplayers) package for playback.

## Instructions

1. Clone this examples repository:

   ```shell
   git clone https://github.com/humeai/hume-api-examples
   cd hume-api-examples/evi/evi-flutter
   ```

2. Install Flutter (if needed) following the [official guide](https://docs.flutter.dev/get-started/install).

3. Install dependencies:

   ```shell
   flutter pub get
   ```

4. Set up your API key:

   You must authenticate to use the EVI API. Your API key can be retrieved from the [Hume AI platform](https://app.hume.ai/keys). For detailed instructions, see our documentation on [getting your api keys](https://dev.hume.ai/docs/introduction/api-key).

   This example uses [flutter_dotenv](https://pub.dev/packages/flutter_dotenv). Place your API key in a `.env` file at the root of your project:

   ```shell
   echo "HUME_API_KEY=your_api_key_here" > .env
   ```

   You can copy the `.env.example` file to use as a template.

   **Note:** the `HUME_API_KEY` environment variable is for development only. In a production Flutter app you should avoid building your API key into the app -- the client should fetch an access token from an endpoint on your server. You should supply the `MY_SERVER_AUTH_URL` environment variable and uncomment the call to `fetchAccessToken` in `lib/main.dart` (a sketch of this pattern appears at the end of this README).

5. Specify an EVI configuration (Optional):

   EVI is pre-configured with a set of default values, which are automatically applied if you do not specify a configuration. The default configuration includes a preset voice and language model, but does not include a system prompt or tools. To customize these options, you will need to create and specify your own EVI configuration. To learn more, see our [configuration guide](https://dev.hume.ai/docs/empathic-voice-interface-evi/configuration/build-a-configuration).

   ```shell
   echo "HUME_CONFIG_ID=your_config_id_here" >> .env
   ```

6. Run the app:

   ```shell
   flutter run
   ```

7. If you are using the Android emulator, make sure to send audio to the emulator from the host.

   ![](host-audio-screenshot.png)

## Notes

- **Echo cancellation.** Echo cancellation is important for a good user experience with EVI. Without it, EVI will detect its own speech as user interruptions, cut itself off, and become incoherent. This Flutter example _requests_ echo cancellation from the browser or the device's operating system, but echo cancellation is hardware-dependent and may not be provided in all environments.
  - Echo cancellation works consistently on physical iOS devices and on the web.
  - Echo cancellation works on some physical Android devices.
  - Echo cancellation doesn't seem to work in the iOS Simulator or the Android Emulator when forwarding audio from the host.
  - If you need to test in a simulator or emulator, or in an environment where echo cancellation is not provided, use headphones, or enable the mute button while EVI is speaking.
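As a rough illustration of the server-token pattern mentioned in step 4 -- this is a minimal sketch, not the actual `fetchAccessToken` from `lib/main.dart` -- the client might call your backend like this. The endpoint behavior and the `accessToken` response field are assumptions; match them to whatever your server actually returns:

```dart
import 'dart:convert';

import 'package:http/http.dart' as http;

/// Minimal sketch: ask your own backend for a short-lived Hume access token
/// instead of shipping HUME_API_KEY inside the app. `serverAuthUrl` would come
/// from the MY_SERVER_AUTH_URL environment variable described in step 4.
/// The `accessToken` JSON field name is an assumption.
Future<String> fetchAccessToken(Uri serverAuthUrl) async {
  final response = await http.get(serverAuthUrl);
  if (response.statusCode != 200) {
    throw Exception('Token endpoint returned ${response.statusCode}');
  }
  final body = jsonDecode(response.body) as Map<String, dynamic>;
  return body['accessToken'] as String;
}
```

Keeping the API key on the server means a leaked app binary exposes only short-lived tokens, which your server can scope and revoke.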
================================================ FILE: evi/evi-flutter/analysis_options.yaml ================================================ # This file configures the analyzer, which statically analyzes Dart code to # check for errors, warnings, and lints. # # The issues identified by the analyzer are surfaced in the UI of Dart-enabled # IDEs (https://dart.dev/tools#ides-and-editors). The analyzer can also be # invoked from the command line by running `flutter analyze`. # The following line activates a set of recommended lints for Flutter apps, # packages, and plugins designed to encourage good coding practices. include: package:flutter_lints/flutter.yaml linter: # The lint rules applied to this project can be customized in the # section below to disable rules from the `package:flutter_lints/flutter.yaml` # included above or to enable additional rules. A list of all available lints # and their documentation is published at https://dart.dev/lints. # # Instead of disabling a lint rule for the entire project in the # section below, it can also be suppressed for a single line of code # or a specific dart file by using the `// ignore: name_of_lint` and # `// ignore_for_file: name_of_lint` syntax on the line or in the file # producing the lint. rules: # avoid_print: false # Uncomment to disable the `avoid_print` rule # prefer_single_quotes: true # Uncomment to enable the `prefer_single_quotes` rule # Additional information about this file can be found at # https://dart.dev/guides/language/analysis-options ================================================ FILE: evi/evi-flutter/android/.gitignore ================================================ gradle-wrapper.jar /.gradle /captures/ /gradlew /gradlew.bat /local.properties GeneratedPluginRegistrant.java # Remember to never publicly share your keystore. # See https://flutter.dev/to/reference-keystore key.properties **/*.keystore **/*.jks ================================================ FILE: evi/evi-flutter/android/app/build.gradle ================================================ plugins { id "com.android.application" id "kotlin-android" // The Flutter Gradle Plugin must be applied after the Android and Kotlin Gradle plugins. id "dev.flutter.flutter-gradle-plugin" } android { namespace = "com.example.evi_example" compileSdk = flutter.compileSdkVersion ndkVersion = flutter.ndkVersion compileOptions { sourceCompatibility = JavaVersion.VERSION_1_8 targetCompatibility = JavaVersion.VERSION_1_8 } kotlinOptions { jvmTarget = JavaVersion.VERSION_1_8 } defaultConfig { // TODO: Specify your own unique Application ID (https://developer.android.com/studio/build/application-id.html). applicationId = "com.example.evi_example" // You can update the following values to match your application needs. // For more information, see: https://flutter.dev/to/review-gradle-config. minSdk = 23 targetSdk = flutter.targetSdkVersion versionCode = flutter.versionCode versionName = flutter.versionName } buildTypes { release { // TODO: Add your own signing config for the release build. // Signing with the debug keys for now, so `flutter run --release` works. signingConfig = signingConfigs.debug } } } flutter { source = "../.." 
} ================================================ FILE: evi/evi-flutter/android/app/src/debug/AndroidManifest.xml ================================================ ================================================ FILE: evi/evi-flutter/android/app/src/main/AndroidManifest.xml ================================================ ================================================ FILE: evi/evi-flutter/android/app/src/main/kotlin/com/example/evi_example/MainActivity.kt ================================================ package com.example.evi_example import io.flutter.embedding.android.FlutterActivity class MainActivity: FlutterActivity() ================================================ FILE: evi/evi-flutter/android/app/src/main/res/drawable/launch_background.xml ================================================ ================================================ FILE: evi/evi-flutter/android/app/src/main/res/drawable-v21/launch_background.xml ================================================ ================================================ FILE: evi/evi-flutter/android/app/src/main/res/values/styles.xml ================================================ ================================================ FILE: evi/evi-flutter/android/app/src/main/res/values-night/styles.xml ================================================ ================================================ FILE: evi/evi-flutter/android/app/src/profile/AndroidManifest.xml ================================================ ================================================ FILE: evi/evi-flutter/android/build.gradle ================================================ allprojects { repositories { google() mavenCentral() } } rootProject.buildDir = "../build" subprojects { project.buildDir = "${rootProject.buildDir}/${project.name}" } subprojects { project.evaluationDependsOn(":app") } tasks.register("clean", Delete) { delete rootProject.buildDir } ================================================ FILE: evi/evi-flutter/android/gradle/wrapper/gradle-wrapper.properties ================================================ distributionBase=GRADLE_USER_HOME distributionPath=wrapper/dists zipStoreBase=GRADLE_USER_HOME zipStorePath=wrapper/dists distributionUrl=https\://services.gradle.org/distributions/gradle-8.3-all.zip ================================================ FILE: evi/evi-flutter/android/gradle.properties ================================================ org.gradle.jvmargs=-Xmx4G -XX:MaxMetaspaceSize=2G -XX:+HeapDumpOnOutOfMemoryError android.useAndroidX=true android.enableJetifier=true ================================================ FILE: evi/evi-flutter/android/settings.gradle ================================================ pluginManagement { def flutterSdkPath = { def properties = new Properties() file("local.properties").withInputStream { properties.load(it) } def flutterSdkPath = properties.getProperty("flutter.sdk") assert flutterSdkPath != null, "flutter.sdk not set in local.properties" return flutterSdkPath }() includeBuild("$flutterSdkPath/packages/flutter_tools/gradle") repositories { google() mavenCentral() gradlePluginPortal() } } plugins { id "dev.flutter.flutter-plugin-loader" version "1.0.0" id "com.android.application" version "8.1.0" apply false id "org.jetbrains.kotlin.android" version "1.8.22" apply false } include ":app" ================================================ FILE: evi/evi-flutter/audio/.gitignore ================================================ # Miscellaneous *.class *.log *.pyc *.swp .DS_Store .atom/ .buildlog/ .history .svn/ 
migrate_working_dir/ # IntelliJ related *.iml *.ipr *.iws .idea/ # The .vscode folder contains launch configuration and tasks you configure in # VS Code which you may wish to be included in version control, so this line # is commented out by default. #.vscode/ # Flutter/Dart/Pub related # Libraries should not include pubspec.lock, per https://dart.dev/guides/libraries/private-files#pubspeclock. /pubspec.lock **/doc/api/ .dart_tool/ build/ ================================================ FILE: evi/evi-flutter/audio/.metadata ================================================ # This file tracks properties of this Flutter project. # Used by Flutter tool to assess capabilities and perform upgrades etc. # # This file should be version controlled and should not be manually edited. version: revision: "nixpkgs000000000000000000000000000000000" channel: "stable" project_type: plugin # Tracks metadata for the flutter migrate command migration: platforms: - platform: root create_revision: nixpkgs000000000000000000000000000000000 base_revision: nixpkgs000000000000000000000000000000000 - platform: ios create_revision: nixpkgs000000000000000000000000000000000 base_revision: nixpkgs000000000000000000000000000000000 # User provided section # List of Local paths (relative to this file) that should be # ignored by the migrate tool. # # Files that are not part of the templates will be ignored by default. unmanaged_files: - 'lib/main.dart' - 'ios/Runner.xcodeproj/project.pbxproj' ================================================ FILE: evi/evi-flutter/audio/ios/.gitignore ================================================ .idea/ .vagrant/ .sconsign.dblite .svn/ .DS_Store *.swp profile DerivedData/ build/ GeneratedPluginRegistrant.h GeneratedPluginRegistrant.m .generated/ *.pbxuser *.mode1v3 *.mode2v3 *.perspectivev3 !default.pbxuser !default.mode1v3 !default.mode2v3 !default.perspectivev3 xcuserdata *.moved-aside *.pyc *sync/ Icon? .tags* /Flutter/Generated.xcconfig /Flutter/ephemeral/ /Flutter/flutter_export_environment.sh ================================================ FILE: evi/evi-flutter/audio/ios/Assets/.gitkeep ================================================ ================================================ FILE: evi/evi-flutter/audio/ios/Classes/AudioPlugin.swift ================================================ import AVFoundation import Flutter import UIKit public class AudioPlugin: NSObject, FlutterPlugin { private lazy var microphone: Microphone = { return Microphone() }() private var soundPlayer: SoundPlayer private var eventChannel: FlutterEventChannel? private var eventSink: FlutterEventSink? 
private func sendError(_ message: String) { DispatchQueue.main.async { self.eventSink?([ "type": "error", "message": message, ]) } } private func sendAudio(_ base64String: String) { DispatchQueue.main.async { self.eventSink?([ "type": "audio", "data": base64String, ]) } } public static func register(with registrar: FlutterPluginRegistrar) { let methodChannel = FlutterMethodChannel( name: "audio", binaryMessenger: registrar.messenger() ) let eventChannel = FlutterEventChannel( name: "audio/events", binaryMessenger: registrar.messenger() ) let instance = AudioPlugin() registrar.addMethodCallDelegate(instance, channel: methodChannel) eventChannel.setStreamHandler(instance) instance.eventChannel = eventChannel } override init() { self.soundPlayer = SoundPlayer() super.init() self.soundPlayer.onError { [weak self] error in guard let self = self else { return } guard let eventSink = self.eventSink else { return } switch error { case .invalidBase64String: sendError("Invalid base64 string") case .couldNotPlayAudio: sendError("Could not play audio") case .decodeError(let details): sendError(details) } } } public func handle(_ call: FlutterMethodCall, result: @escaping FlutterResult) { switch call.method { case "getPermissions": Task { await getPermissions() } case "startRecording": do { try ensureInittedAudioSession() try microphone.startRecording(onBase64EncodedAudio: sendAudio) result(nil) } catch { result( FlutterError( code: "START_RECORDING_ERROR", message: error.localizedDescription, details: nil ) ) } case "enqueueAudio": guard let base64String = call.arguments as? String else { result( FlutterError( code: "INVALID_ARGUMENTS", message: "Expected base64 string", details: nil )) return } Task { do { try await soundPlayer.enqueueAudio(base64String) } catch { sendError(error.localizedDescription) } } result(nil) case "stopPlayback": soundPlayer.stopPlayback() result(nil) case "stopRecording": microphone.stopRecording() result(nil) default: result(FlutterMethodNotImplemented) } } private func getPermissions() async -> Bool { let audioSession = AVAudioSession.sharedInstance() switch audioSession.recordPermission { case .granted: return true case .denied: return false case .undetermined: return await withCheckedContinuation { continuation in audioSession.requestRecordPermission { granted in continuation.resume(returning: granted) } } @unknown default: sendError("Unknown permission state") return false } } private var inittedAudioSession = false private func ensureInittedAudioSession() throws { if inittedAudioSession { return } let audioSession = AVAudioSession.sharedInstance() try audioSession.setCategory( .playAndRecord, mode: .voiceChat, options: [.defaultToSpeaker, .allowBluetooth, .allowBluetoothA2DP] ) try audioSession.setActive(true) inittedAudioSession = true } } extension AudioPlugin: FlutterStreamHandler { public func onListen( withArguments arguments: Any?, eventSink events: @escaping FlutterEventSink ) -> FlutterError? { self.eventSink = events return nil } public func onCancel(withArguments arguments: Any?) -> FlutterError? 
{ self.eventSink = nil return nil } } ================================================ FILE: evi/evi-flutter/audio/ios/Classes/Microphone.swift ================================================ import AVFoundation import Foundation public enum MicrophoneError: Error { case conversionFailed(details: String) case setupFailed(details: String) } public class Microphone { public static let sampleRate: Double = 44100 public static let isLinear16PCM: Bool = true private static let desiredInputFormat = AVAudioFormat(commonFormat: .pcmFormatInt16, sampleRate: sampleRate, channels: 1, interleaved: false)! private var audioEngine: AVAudioEngine? private var inputNode: AVAudioInputNode? private var isMuted: Bool = false private var onError: ((MicrophoneError) -> Void)? public init() { self.isMuted = false } public func onError(_ onError: @escaping (MicrophoneError) -> Void) { self.onError = onError } public func mute() { self.isMuted = true } public func unmute() { self.isMuted = false } private func setupAudioEngine() throws { self.audioEngine = AVAudioEngine() guard let audioEngine = self.audioEngine else { throw MicrophoneError.setupFailed(details: "Failed to create audio engine") } self.inputNode = audioEngine.inputNode guard let inputNode = self.inputNode else { throw MicrophoneError.setupFailed(details: "Failed to get input node") } let outputNode: AVAudioOutputNode = audioEngine.outputNode let mainMixerNode: AVAudioMixerNode = audioEngine.mainMixerNode audioEngine.connect(mainMixerNode, to: outputNode, format: nil) try inputNode.setVoiceProcessingEnabled(true) try outputNode.setVoiceProcessingEnabled(true) if #available(iOS 17.0, *) { let duckingConfig = AVAudioVoiceProcessingOtherAudioDuckingConfiguration(enableAdvancedDucking: false, duckingLevel: .max) inputNode.voiceProcessingOtherAudioDuckingConfiguration = duckingConfig } } public func startRecording(onBase64EncodedAudio: @escaping (String) -> Void) throws { if audioEngine == nil { try setupAudioEngine() } guard let audioEngine = self.audioEngine, let inputNode = self.inputNode else { throw MicrophoneError.setupFailed(details: "Audio engine not properly initialized") } let nativeInputFormat = inputNode.inputFormat(forBus: 0) let inputBufferSize = UInt32(nativeInputFormat.sampleRate * 0.1) inputNode.installTap(onBus: 0, bufferSize: inputBufferSize, format: nativeInputFormat) { (buffer, time) in let convertedBuffer = AVAudioPCMBuffer(pcmFormat: Microphone.desiredInputFormat, frameCapacity: 1024)! var error: NSError? = nil if self.isMuted { let silence = Data(repeating: 0, count: Int(convertedBuffer.frameCapacity) * Int(convertedBuffer.format.streamDescription.pointee.mBytesPerFrame)) onBase64EncodedAudio(silence.base64EncodedString()) return } let inputAudioConverter = AVAudioConverter(from: nativeInputFormat, to: Microphone.desiredInputFormat)! 
let status = inputAudioConverter.convert(to: convertedBuffer, error: &error, withInputFrom: {inNumPackets, outStatus in outStatus.pointee = .haveData buffer.frameLength = inNumPackets return buffer }) if status == .haveData { let byteLength = Int(convertedBuffer.frameLength) * Int(convertedBuffer.format.streamDescription.pointee.mBytesPerFrame) let audioData = Data(bytes: convertedBuffer.audioBufferList.pointee.mBuffers.mData!, count: byteLength) let base64String = audioData.base64EncodedString() onBase64EncodedAudio(base64String) return } if error != nil { self.onError?(MicrophoneError.conversionFailed(details: error!.localizedDescription)) return } self.onError?(MicrophoneError.conversionFailed(details: "Unexpected status during audio conversion: \(status)")) } if (!audioEngine.isRunning) { try audioEngine.start() } } public func stopRecording() { audioEngine?.stop() inputNode?.removeTap(onBus: 0) } } ================================================ FILE: evi/evi-flutter/audio/ios/Classes/SoundPlayer.swift ================================================ import AVFoundation import Foundation public enum SoundPlayerError: Error { case invalidBase64String case couldNotPlayAudio case decodeError(details: String) } public class SoundPlayer: NSObject, AVAudioPlayerDelegate { private var audioPlayer: AVAudioPlayer? // EVI can send audio output messages faster than they can be played back. // It is important to buffer them in a queue so as not to cut off a clip of // playing audio with a more recent clip. private var audioQueue: [Data] = [] // Queue for audio segments private var isPlaying: Bool = false // Tracks if audio is currently playing private var onError: ((SoundPlayerError) -> Void)? public func onError(_ onError: @escaping (SoundPlayerError) -> Void) { self.onError = onError } public func stopPlayback() { self.audioPlayer?.stop() self.audioPlayer = nil self.audioQueue.removeAll() // Clear the queue isPlaying = false } public func enqueueAudio(_ base64String: String) async throws { guard let data = Data(base64Encoded: base64String) else { throw SoundPlayerError.invalidBase64String } audioQueue.append(data) // If not already playing, start playback if !isPlaying { do { try playNextInQueue() } catch { if let soundError = error as? SoundPlayerError { self.onError?(soundError) } else { self.onError?(SoundPlayerError.decodeError(details: error.localizedDescription)) } } } } private func playNextInQueue() throws { guard !audioQueue.isEmpty else { isPlaying = false return } isPlaying = true let data = audioQueue.removeFirst() self.audioPlayer = try AVAudioPlayer(data: data, fileTypeHint: AVFileType.wav.rawValue) let session: AVAudioSession = AVAudioSession.sharedInstance() self.audioPlayer!.prepareToPlay() self.audioPlayer!.delegate = self let result = audioPlayer!.play() let isSpeaker = session.currentRoute.outputs.first?.portType == AVAudioSession.Port.builtInSpeaker if isSpeaker { // This is to work around an issue with AVFoundation and voiceProcessing: https://forums.developer.apple.com/forums/thread/721535 self.audioPlayer!.volume = 1.0 try session.overrideOutputAudioPort(.none) try session.overrideOutputAudioPort(.speaker) } if !result { throw SoundPlayerError.couldNotPlayAudio } } public func audioPlayerDidFinishPlaying(_ player: AVAudioPlayer, successfully flag: Bool) { do { try playNextInQueue() } catch { self.onError?(error as! SoundPlayerError) } } public func audioPlayerDecodeErrorDidOccur(_ player: AVAudioPlayer, error: Error?) 
{ self.onError?( SoundPlayerError.decodeError(details: error?.localizedDescription ?? "Unknown error")) } } ================================================ FILE: evi/evi-flutter/audio/ios/Resources/PrivacyInfo.xcprivacy ================================================ NSPrivacyTrackingDomains NSPrivacyAccessedAPITypes NSPrivacyCollectedDataTypes NSPrivacyTracking ================================================ FILE: evi/evi-flutter/audio/ios/audio.podspec ================================================ # # To learn more about a Podspec see http://guides.cocoapods.org/syntax/podspec.html. # Run `pod lib lint audio.podspec` to validate before publishing. # Pod::Spec.new do |s| s.name = 'audio' s.version = '0.0.1' s.summary = 'A new Flutter plugin project.' s.description = <<-DESC A new Flutter plugin project. DESC s.homepage = 'http://example.com' s.license = { :file => '../LICENSE' } s.author = { 'Your Company' => 'email@example.com' } s.source = { :path => '.' } s.source_files = 'Classes/**/*' s.dependency 'Flutter' s.platform = :ios, '13.0' # Flutter.framework does not contain a i386 slice. s.pod_target_xcconfig = { 'DEFINES_MODULE' => 'YES', 'EXCLUDED_ARCHS[sdk=iphonesimulator*]' => 'i386' } s.swift_version = '5.0' # If your plugin requires a privacy manifest, for example if it uses any # required reason APIs, update the PrivacyInfo.xcprivacy file to describe your # plugin's privacy impact, and then uncomment this line. For more information, # see https://developer.apple.com/documentation/bundleresources/privacy_manifest_files # s.resource_bundles = {'audio_privacy' => ['Resources/PrivacyInfo.xcprivacy']} end ================================================ FILE: evi/evi-flutter/audio/lib/audio.dart ================================================ import 'dart:async'; import 'dart:convert'; import 'dart:io' show Platform; import 'package:flutter/foundation.dart' show kIsWeb; import 'package:flutter/services.dart'; import 'package:audio/dart_audio.dart'; class Audio { static final Audio _instance = Audio._internal(); factory Audio() => _instance; static const MethodChannel channel = MethodChannel('audio'); static const EventChannel _eventChannel = EventChannel('audio/events'); DartAudio? 
_dartAudio; Audio._internal() { if (kIsWeb || !Platform.isIOS) { _dartAudio = DartAudio(); } else { _eventChannel.receiveBroadcastStream().listen( (event) { if (event is Map) { if (event['type'] == 'audio') { final audioData = event['data'] as String; _audioController.add(audioData); } else if (event['type'] == 'error') { final error = event['message'] as String; _audioController.addError(error); } } }, onError: (error) { _audioController.addError(error); }, ); } } final _audioController = StreamController.broadcast(); Stream get audioStream => _audioController.stream; Future startRecording() async { if (_dartAudio != null) { (await _dartAudio!.startRecording()).listen( (data) { _audioController.add(base64Encode(data)); }, onError: (error) { _audioController.addError(error); }, ); } else { try { await channel.invokeMethod('startRecording'); } catch (error) { _audioController.addError(error); } } } Future stopRecording() async { if (_dartAudio != null) { _dartAudio!.stopRecording(); } else { return channel.invokeMethod('stopRecording'); } } Future mute() async { if (_dartAudio != null) { _dartAudio!.mute(); } else { return await channel.invokeMethod('mute'); } } Future unmute() async { if (_dartAudio != null) { _dartAudio!.unmute(); } else { return await channel.invokeMethod('unmute'); } } Future enqueueAudio(String base64String) async { if (_dartAudio != null) { _dartAudio!.enqueueAudioSegment(base64String); } else { print("Invoking enqueueAudio"); return channel.invokeMethod('enqueueAudio', base64String); } } Future stopPlayback() async { if (_dartAudio != null) { _dartAudio!.stopPlayback(); } else { await channel.invokeMethod('stopPlayback'); } } Future dispose() async { _audioController.close(); await _dartAudio?.dispose(); } } ================================================ FILE: evi/evi-flutter/audio/lib/audio_method_channel.dart ================================================ import 'package:flutter/foundation.dart'; import 'package:flutter/services.dart'; import 'audio_platform_interface.dart'; /// An implementation of [AudioPlatform] that uses method channels. class MethodChannelAudio extends AudioPlatform { /// The method channel used to interact with the native platform. @visibleForTesting final methodChannel = const MethodChannel('audio'); @override Future getPlatformVersion() async { final version = await methodChannel.invokeMethod('getPlatformVersion'); return version; } } ================================================ FILE: evi/evi-flutter/audio/lib/audio_platform_interface.dart ================================================ import 'package:plugin_platform_interface/plugin_platform_interface.dart'; import 'audio_method_channel.dart'; abstract class AudioPlatform extends PlatformInterface { /// Constructs a AudioPlatform. AudioPlatform() : super(token: _token); static final Object _token = Object(); static AudioPlatform _instance = MethodChannelAudio(); /// The default instance of [AudioPlatform] to use. /// /// Defaults to [MethodChannelAudio]. static AudioPlatform get instance => _instance; /// Platform-specific implementations should set this with their own /// platform-specific class that extends [AudioPlatform] when /// they register themselves. 
static set instance(AudioPlatform instance) { PlatformInterface.verifyToken(instance, _token); _instance = instance; } Future getPlatformVersion() { throw UnimplementedError('platformVersion() has not been implemented.'); } } ================================================ FILE: evi/evi-flutter/audio/lib/dart_audio.dart ================================================ import 'dart:async'; import 'dart:convert'; import 'package:audioplayers/audioplayers.dart'; import 'package:record/record.dart'; class DartAudio { // Playback stuff final AudioPlayer _audioPlayer = AudioPlayer(); final List _playbackAudioQueue = []; // Recording stuff final AudioRecorder _recorder = AudioRecorder(); final config = const RecordConfig( encoder: AudioEncoder.pcm16bits, bitRate: 48000 * 2 * 16, // 48000 samples per second * 2 channels (stereo) * 16 bits per sample sampleRate: 48000, numChannels: 1, autoGain: true, echoCancel: true, noiseSuppress: true, ); bool _isMuted = false; bool _isRecording = false; StreamSubscription>? _recordSubscription; DartAudio() { final AudioContext audioContext = AudioContext( android: const AudioContextAndroid( isSpeakerphoneOn: false, audioMode: AndroidAudioMode.normal, stayAwake: false, contentType: AndroidContentType.speech, usageType: AndroidUsageType.voiceCommunication, audioFocus: AndroidAudioFocus.gain, ), ); AudioPlayer.global.setAudioContext(audioContext); _audioPlayer.onPlayerComplete.listen((event) { _playNextAudioSegment(); }); } // ------------------------- // Playback fallback // ------------------------- void enqueueAudioSegment(String base64Bytes) { final audioSegment = BytesSource(base64Decode(base64Bytes)); if (_audioPlayer.state == PlayerState.playing) { _playbackAudioQueue.add(audioSegment); } else { _audioPlayer.play(audioSegment); } } void stopPlayback() { _playbackAudioQueue.clear(); _audioPlayer.stop(); } void _playNextAudioSegment() { if (_playbackAudioQueue.isNotEmpty) { final audioSegment = _playbackAudioQueue.removeAt(0); _audioPlayer.play(audioSegment); } } // ---------------------------------------------------------------- // (A) Recording fallback: returning a Stream of chunked bytes // ---------------------------------------------------------------- /// Starts recording, returning a stream of byte chunks. /// You can specify the config (sampleRate, bitRate, etc.) and a /// "chunkSize" in bytes. Each chunk of raw audio is emitted in the stream. Future>> startRecording() async { if (_isRecording) { throw Exception('Already recording'); } // Request mic permission if (!await _recorder.hasPermission()) { throw Exception('No mic permission'); } // We'll create a StreamController to push chunked data final controller = StreamController>(); // Start streaming from the record package final recordStream = await _recorder.startStream(config); _isRecording = true; _isMuted = false; final audioInputBuffer = []; // Calculate chunk size in bytes, e.g., config.bitRate / 10 for ~100ms final chunkSize = config.bitRate ~/ 10; _recordSubscription = recordStream.listen( (data) { if (!_isMuted) { // If not muted, we add the new data audioInputBuffer.addAll(data); if (audioInputBuffer.length >= chunkSize) { // If the entire chunk is silent, ignore it if you want final bufferWasEmpty = audioInputBuffer.every((byte) => byte == 0); if (!bufferWasEmpty) { // Emit this chunk to the stream controller.add(List.from(audioInputBuffer)); } audioInputBuffer.clear(); } } else { // If muted, optionally do nothing or emit zeros, etc. 
} }, onError: (err) => controller.addError(err), onDone: () { _isRecording = false; controller.close(); }, ); return controller.stream; } Future stopRecording() async { if (_isRecording) { await _recordSubscription?.cancel(); _recordSubscription = null; await _recorder.stop(); _isRecording = false; _isMuted = false; } } Future mute() async { _isMuted = true; } Future unmute() async { _isMuted = false; } // If you want a simpler "just record to a file," // you could do it in separate methods. But this is // a chunked streaming approach, same as your original code. // ---------------------------------------------------------------- // Cleanup // ---------------------------------------------------------------- Future dispose() async { await _audioPlayer.dispose(); await stopRecording(); // stop + unsub } } ================================================ FILE: evi/evi-flutter/audio/pubspec.yaml ================================================ name: audio description: "A new Flutter plugin project." version: 0.0.1 homepage: environment: sdk: ^3.5.4 flutter: '>=3.3.0' dependencies: flutter: sdk: flutter audioplayers: ^6.1.0 record: ^5.1.2 plugin_platform_interface: ^2.0.2 dev_dependencies: flutter_test: sdk: flutter flutter_lints: ^4.0.0 # For information on the generic Dart part of this file, see the # following page: https://dart.dev/tools/pub/pubspec # The following section is specific to Flutter packages. flutter: # This section identifies this Flutter project as a plugin project. # The 'pluginClass' specifies the class (in Java, Kotlin, Swift, Objective-C, etc.) # which should be registered in the plugin registry. This is required for # using method channels. # The Android 'package' specifies package in which the registered class is. # This is required for using method channels on Android. # The 'ffiPlugin' specifies that native code should be built and bundled. # This is required for using `dart:ffi`. # All these are used by the tooling to maintain consistency when # adding or updating assets for this project. plugin: platforms: ios: pluginClass: AudioPlugin # To add assets to your plugin package, add an assets section, like this: # assets: # - images/a_dot_burr.jpeg # - images/a_dot_ham.jpeg # # For details regarding assets in packages, see # https://flutter.dev/to/asset-from-package # # An image asset can refer to one or more resolution-specific "variants", see # https://flutter.dev/to/resolution-aware-images # To add custom fonts to your plugin package, add a fonts section here, # in this "flutter" section. Each entry in this list should have a # "family" key with the font family name, and a "fonts" key with a # list giving the asset and other descriptors for the font. 
For # example: # fonts: # - family: Schyler # fonts: # - asset: fonts/Schyler-Regular.ttf # - asset: fonts/Schyler-Italic.ttf # style: italic # - family: Trajan Pro # fonts: # - asset: fonts/TrajanPro.ttf # - asset: fonts/TrajanPro_Bold.ttf # weight: 700 # # For details regarding fonts in packages, see # https://flutter.dev/to/font-from-package ================================================ FILE: evi/evi-flutter/audio/test/audio_method_channel_test.dart ================================================ import 'package:flutter/services.dart'; import 'package:flutter_test/flutter_test.dart'; import 'package:audio/audio_method_channel.dart'; void main() { TestWidgetsFlutterBinding.ensureInitialized(); MethodChannelAudio platform = MethodChannelAudio(); const MethodChannel channel = MethodChannel('audio'); setUp(() { TestDefaultBinaryMessengerBinding.instance.defaultBinaryMessenger.setMockMethodCallHandler( channel, (MethodCall methodCall) async { return '42'; }, ); }); tearDown(() { TestDefaultBinaryMessengerBinding.instance.defaultBinaryMessenger.setMockMethodCallHandler(channel, null); }); test('getPlatformVersion', () async { expect(await platform.getPlatformVersion(), '42'); }); } ================================================ FILE: evi/evi-flutter/audio/test/audio_test.dart ================================================ import 'package:flutter_test/flutter_test.dart'; import 'package:audio/audio.dart'; import 'package:audio/audio_platform_interface.dart'; import 'package:audio/audio_method_channel.dart'; import 'package:plugin_platform_interface/plugin_platform_interface.dart'; class MockAudioPlatform with MockPlatformInterfaceMixin implements AudioPlatform { @override Future getPlatformVersion() => Future.value('42'); } void main() { final AudioPlatform initialPlatform = AudioPlatform.instance; test('$MethodChannelAudio is the default instance', () { expect(initialPlatform, isInstanceOf()); }); test('getPlatformVersion', () async { Audio audioPlugin = Audio(); MockAudioPlatform fakePlatform = MockAudioPlatform(); AudioPlatform.instance = fakePlatform; expect(await audioPlugin.getPlatformVersion(), '42'); }); } ================================================ FILE: evi/evi-flutter/ios/.gitignore ================================================ **/dgph *.mode1v3 *.mode2v3 *.moved-aside *.pbxuser *.perspectivev3 **/*sync/ .sconsign.dblite .tags* **/.vagrant/ **/DerivedData/ Icon? **/Pods/ **/.symlinks/ profile xcuserdata **/.generated/ Flutter/App.framework Flutter/Flutter.framework Flutter/Flutter.podspec Flutter/Generated.xcconfig Flutter/ephemeral/ Flutter/app.flx Flutter/app.zip Flutter/flutter_assets/ Flutter/flutter_export_environment.sh ServiceDefinitions.json Runner/GeneratedPluginRegistrant.* # Exceptions to above rules. !default.mode1v3 !default.mode2v3 !default.pbxuser !default.perspectivev3 ================================================ FILE: evi/evi-flutter/ios/Flutter/AppFrameworkInfo.plist ================================================ CFBundleDevelopmentRegion en CFBundleExecutable App CFBundleIdentifier io.flutter.flutter.app CFBundleInfoDictionaryVersion 6.0 CFBundleName App CFBundlePackageType FMWK CFBundleShortVersionString 1.0 CFBundleSignature ???? CFBundleVersion 1.0 MinimumOSVersion 12.0 ================================================ FILE: evi/evi-flutter/ios/Flutter/Debug.xcconfig ================================================ #include? 
"Pods/Target Support Files/Pods-Runner/Pods-Runner.debug.xcconfig" #include "Generated.xcconfig" ================================================ FILE: evi/evi-flutter/ios/Flutter/Release.xcconfig ================================================ #include? "Pods/Target Support Files/Pods-Runner/Pods-Runner.release.xcconfig" #include "Generated.xcconfig" ================================================ FILE: evi/evi-flutter/ios/Podfile ================================================ # Uncomment this line to define a global platform for your project # platform :ios, '12.0' # CocoaPods analytics sends network stats synchronously affecting flutter build latency. ENV['COCOAPODS_DISABLE_STATS'] = 'true' project 'Runner', { 'Debug' => :debug, 'Profile' => :release, 'Release' => :release, } def flutter_root generated_xcode_build_settings_path = File.expand_path(File.join('..', 'Flutter', 'Generated.xcconfig'), __FILE__) unless File.exist?(generated_xcode_build_settings_path) raise "#{generated_xcode_build_settings_path} must exist. If you're running pod install manually, make sure flutter pub get is executed first" end File.foreach(generated_xcode_build_settings_path) do |line| matches = line.match(/FLUTTER_ROOT\=(.*)/) return matches[1].strip if matches end raise "FLUTTER_ROOT not found in #{generated_xcode_build_settings_path}. Try deleting Generated.xcconfig, then run flutter pub get" end require File.expand_path(File.join('packages', 'flutter_tools', 'bin', 'podhelper'), flutter_root) flutter_ios_podfile_setup target 'Runner' do use_frameworks! use_modular_headers! flutter_install_all_ios_pods File.dirname(File.realpath(__FILE__)) target 'RunnerTests' do inherit! :search_paths end end post_install do |installer| installer.pods_project.targets.each do |target| flutter_additional_ios_build_settings(target) end end ================================================ FILE: evi/evi-flutter/ios/Runner/AppDelegate.swift ================================================ import Flutter import UIKit @main @objc class AppDelegate: FlutterAppDelegate { override func application( _ application: UIApplication, didFinishLaunchingWithOptions launchOptions: [UIApplication.LaunchOptionsKey: Any]? 
) -> Bool { GeneratedPluginRegistrant.register(with: self) return super.application(application, didFinishLaunchingWithOptions: launchOptions) } } ================================================ FILE: evi/evi-flutter/ios/Runner/Assets.xcassets/AppIcon.appiconset/Contents.json ================================================ { "images" : [ { "size" : "20x20", "idiom" : "iphone", "filename" : "Icon-App-20x20@2x.png", "scale" : "2x" }, { "size" : "20x20", "idiom" : "iphone", "filename" : "Icon-App-20x20@3x.png", "scale" : "3x" }, { "size" : "29x29", "idiom" : "iphone", "filename" : "Icon-App-29x29@1x.png", "scale" : "1x" }, { "size" : "29x29", "idiom" : "iphone", "filename" : "Icon-App-29x29@2x.png", "scale" : "2x" }, { "size" : "29x29", "idiom" : "iphone", "filename" : "Icon-App-29x29@3x.png", "scale" : "3x" }, { "size" : "40x40", "idiom" : "iphone", "filename" : "Icon-App-40x40@2x.png", "scale" : "2x" }, { "size" : "40x40", "idiom" : "iphone", "filename" : "Icon-App-40x40@3x.png", "scale" : "3x" }, { "size" : "60x60", "idiom" : "iphone", "filename" : "Icon-App-60x60@2x.png", "scale" : "2x" }, { "size" : "60x60", "idiom" : "iphone", "filename" : "Icon-App-60x60@3x.png", "scale" : "3x" }, { "size" : "20x20", "idiom" : "ipad", "filename" : "Icon-App-20x20@1x.png", "scale" : "1x" }, { "size" : "20x20", "idiom" : "ipad", "filename" : "Icon-App-20x20@2x.png", "scale" : "2x" }, { "size" : "29x29", "idiom" : "ipad", "filename" : "Icon-App-29x29@1x.png", "scale" : "1x" }, { "size" : "29x29", "idiom" : "ipad", "filename" : "Icon-App-29x29@2x.png", "scale" : "2x" }, { "size" : "40x40", "idiom" : "ipad", "filename" : "Icon-App-40x40@1x.png", "scale" : "1x" }, { "size" : "40x40", "idiom" : "ipad", "filename" : "Icon-App-40x40@2x.png", "scale" : "2x" }, { "size" : "76x76", "idiom" : "ipad", "filename" : "Icon-App-76x76@1x.png", "scale" : "1x" }, { "size" : "76x76", "idiom" : "ipad", "filename" : "Icon-App-76x76@2x.png", "scale" : "2x" }, { "size" : "83.5x83.5", "idiom" : "ipad", "filename" : "Icon-App-83.5x83.5@2x.png", "scale" : "2x" }, { "size" : "1024x1024", "idiom" : "ios-marketing", "filename" : "Icon-App-1024x1024@1x.png", "scale" : "1x" } ], "info" : { "version" : 1, "author" : "xcode" } } ================================================ FILE: evi/evi-flutter/ios/Runner/Assets.xcassets/LaunchImage.imageset/Contents.json ================================================ { "images" : [ { "idiom" : "universal", "filename" : "LaunchImage.png", "scale" : "1x" }, { "idiom" : "universal", "filename" : "LaunchImage@2x.png", "scale" : "2x" }, { "idiom" : "universal", "filename" : "LaunchImage@3x.png", "scale" : "3x" } ], "info" : { "version" : 1, "author" : "xcode" } } ================================================ FILE: evi/evi-flutter/ios/Runner/Assets.xcassets/LaunchImage.imageset/README.md ================================================ # Launch Screen Assets You can customize the launch screen with your own desired assets by replacing the image files in this directory. You can also do it by opening your Flutter project's Xcode project with `open ios/Runner.xcworkspace`, selecting `Runner/Assets.xcassets` in the Project Navigator and dropping in the desired images. 
================================================ FILE: evi/evi-flutter/ios/Runner/Base.lproj/LaunchScreen.storyboard ================================================ ================================================ FILE: evi/evi-flutter/ios/Runner/Base.lproj/Main.storyboard ================================================ ================================================
FILE: evi/evi-flutter/ios/Runner/Info.plist
================================================
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
    <key>CFBundleDevelopmentRegion</key>
    <string>$(DEVELOPMENT_LANGUAGE)</string>
    <key>CFBundleDisplayName</key>
    <string>Evi Example</string>
    <key>CFBundleExecutable</key>
    <string>$(EXECUTABLE_NAME)</string>
    <key>CFBundleIdentifier</key>
    <string>$(PRODUCT_BUNDLE_IDENTIFIER)</string>
    <key>CFBundleInfoDictionaryVersion</key>
    <string>6.0</string>
    <key>CFBundleName</key>
    <string>evi_example</string>
    <key>CFBundlePackageType</key>
    <string>APPL</string>
    <key>CFBundleShortVersionString</key>
    <string>$(FLUTTER_BUILD_NAME)</string>
    <key>CFBundleSignature</key>
    <string>????</string>
    <key>CFBundleVersion</key>
    <string>$(FLUTTER_BUILD_NUMBER)</string>
    <key>LSRequiresIPhoneOS</key>
    <true/>
    <key>UILaunchStoryboardName</key>
    <string>LaunchScreen</string>
    <key>UIMainStoryboardFile</key>
    <string>Main</string>
    <key>UISupportedInterfaceOrientations</key>
    <array>
        <string>UIInterfaceOrientationPortrait</string>
        <string>UIInterfaceOrientationLandscapeLeft</string>
        <string>UIInterfaceOrientationLandscapeRight</string>
    </array>
    <key>UISupportedInterfaceOrientations~ipad</key>
    <array>
        <string>UIInterfaceOrientationPortrait</string>
        <string>UIInterfaceOrientationPortraitUpsideDown</string>
        <string>UIInterfaceOrientationLandscapeLeft</string>
        <string>UIInterfaceOrientationLandscapeRight</string>
    </array>
    <key>CADisableMinimumFrameDurationOnPhone</key>
    <true/>
    <key>UIApplicationSupportsIndirectInputEvents</key>
    <true/>
    <key>NSMicrophoneUsageDescription</key>
    <string>Transmits speech to a conversational AI</string>
</dict>
</plist>
================================================ FILE: evi/evi-flutter/ios/Runner/Runner-Bridging-Header.h ================================================ #import "GeneratedPluginRegistrant.h" ================================================ FILE: evi/evi-flutter/ios/Runner.xcodeproj/project.pbxproj ================================================ // !$*UTF8*$! { archiveVersion = 1; classes = { }; objectVersion = 54; objects = { /* Begin PBXBuildFile section */ 1498D2341E8E89220040F4C2 /* GeneratedPluginRegistrant.m in Sources */ = {isa = PBXBuildFile; fileRef = 1498D2331E8E89220040F4C2 /* GeneratedPluginRegistrant.m */; }; 331C808B294A63AB00263BE5 /* RunnerTests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 331C807B294A618700263BE5 /* RunnerTests.swift */; }; 3B3967161E833CAA004F5970 /* AppFrameworkInfo.plist in Resources */ = {isa = PBXBuildFile; fileRef = 3B3967151E833CAA004F5970 /* AppFrameworkInfo.plist */; }; 711D2FFCE0FE40B34BED4AB3 /* Pods_RunnerTests.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 3CB28A3E128E29A157E09664 /* Pods_RunnerTests.framework */; }; 74858FAF1ED2DC5600515810 /* AppDelegate.swift in Sources */ = {isa = PBXBuildFile; fileRef = 74858FAE1ED2DC5600515810 /* AppDelegate.swift */; }; 97C146FC1CF9000F007C117D /* Main.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = 97C146FA1CF9000F007C117D /* Main.storyboard */; }; 97C146FE1CF9000F007C117D /* Assets.xcassets in Resources */ = {isa = PBXBuildFile; fileRef = 97C146FD1CF9000F007C117D /* Assets.xcassets */; }; 97C147011CF9000F007C117D /* LaunchScreen.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = 97C146FF1CF9000F007C117D /* LaunchScreen.storyboard */; }; CA39E55412833DBA2A92AB4E /* Pods_Runner.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 9E1187B4A5C75EEA06C9F271 /* Pods_Runner.framework */; }; /* End PBXBuildFile section */ /* Begin PBXContainerItemProxy section */ 331C8085294A63A400263BE5 /* PBXContainerItemProxy */ = { isa = PBXContainerItemProxy; containerPortal = 97C146E61CF9000F007C117D /* Project object */; proxyType = 1; remoteGlobalIDString = 97C146ED1CF9000F007C117D; remoteInfo = Runner; }; /* End
PBXContainerItemProxy section */ /* Begin PBXCopyFilesBuildPhase section */ 9705A1C41CF9048500538489 /* Embed Frameworks */ = { isa = PBXCopyFilesBuildPhase; buildActionMask = 2147483647; dstPath = ""; dstSubfolderSpec = 10; files = ( ); name = "Embed Frameworks"; runOnlyForDeploymentPostprocessing = 0; }; /* End PBXCopyFilesBuildPhase section */ /* Begin PBXFileReference section */ 08CD5310A37D5E4C76389779 /* Pods-RunnerTests.debug.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-RunnerTests.debug.xcconfig"; path = "Target Support Files/Pods-RunnerTests/Pods-RunnerTests.debug.xcconfig"; sourceTree = ""; }; 1498D2321E8E86230040F4C2 /* GeneratedPluginRegistrant.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = GeneratedPluginRegistrant.h; sourceTree = ""; }; 1498D2331E8E89220040F4C2 /* GeneratedPluginRegistrant.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = GeneratedPluginRegistrant.m; sourceTree = ""; }; 331C807B294A618700263BE5 /* RunnerTests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = RunnerTests.swift; sourceTree = ""; }; 331C8081294A63A400263BE5 /* RunnerTests.xctest */ = {isa = PBXFileReference; explicitFileType = wrapper.cfbundle; includeInIndex = 0; path = RunnerTests.xctest; sourceTree = BUILT_PRODUCTS_DIR; }; 3B3967151E833CAA004F5970 /* AppFrameworkInfo.plist */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = text.plist.xml; name = AppFrameworkInfo.plist; path = Flutter/AppFrameworkInfo.plist; sourceTree = ""; }; 3CB28A3E128E29A157E09664 /* Pods_RunnerTests.framework */ = {isa = PBXFileReference; explicitFileType = wrapper.framework; includeInIndex = 0; path = Pods_RunnerTests.framework; sourceTree = BUILT_PRODUCTS_DIR; }; 5EB49DBBBD50A5533BF1F17B /* Pods-Runner.profile.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-Runner.profile.xcconfig"; path = "Target Support Files/Pods-Runner/Pods-Runner.profile.xcconfig"; sourceTree = ""; }; 74858FAD1ED2DC5600515810 /* Runner-Bridging-Header.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = "Runner-Bridging-Header.h"; sourceTree = ""; }; 74858FAE1ED2DC5600515810 /* AppDelegate.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = AppDelegate.swift; sourceTree = ""; }; 7AFA3C8E1D35360C0083082E /* Release.xcconfig */ = {isa = PBXFileReference; lastKnownFileType = text.xcconfig; name = Release.xcconfig; path = Flutter/Release.xcconfig; sourceTree = ""; }; 9740EEB21CF90195004384FC /* Debug.xcconfig */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = text.xcconfig; name = Debug.xcconfig; path = Flutter/Debug.xcconfig; sourceTree = ""; }; 9740EEB31CF90195004384FC /* Generated.xcconfig */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = text.xcconfig; name = Generated.xcconfig; path = Flutter/Generated.xcconfig; sourceTree = ""; }; 97C146EE1CF9000F007C117D /* Runner.app */ = {isa = PBXFileReference; explicitFileType = wrapper.application; includeInIndex = 0; path = Runner.app; sourceTree = BUILT_PRODUCTS_DIR; }; 97C146FB1CF9000F007C117D /* Base */ = {isa = PBXFileReference; lastKnownFileType = file.storyboard; name = Base; path = Base.lproj/Main.storyboard; sourceTree = ""; }; 97C146FD1CF9000F007C117D /* Assets.xcassets */ = {isa = PBXFileReference; lastKnownFileType = folder.assetcatalog; path = 
Assets.xcassets; sourceTree = ""; }; 97C147001CF9000F007C117D /* Base */ = {isa = PBXFileReference; lastKnownFileType = file.storyboard; name = Base; path = Base.lproj/LaunchScreen.storyboard; sourceTree = ""; }; 97C147021CF9000F007C117D /* Info.plist */ = {isa = PBXFileReference; lastKnownFileType = text.plist.xml; path = Info.plist; sourceTree = ""; }; 9E1187B4A5C75EEA06C9F271 /* Pods_Runner.framework */ = {isa = PBXFileReference; explicitFileType = wrapper.framework; includeInIndex = 0; path = Pods_Runner.framework; sourceTree = BUILT_PRODUCTS_DIR; }; AAB04AD19FC40D502B31E905 /* Pods-Runner.release.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-Runner.release.xcconfig"; path = "Target Support Files/Pods-Runner/Pods-Runner.release.xcconfig"; sourceTree = ""; }; C86C8F4A02C79AC1A7BF6F09 /* Pods-Runner.debug.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-Runner.debug.xcconfig"; path = "Target Support Files/Pods-Runner/Pods-Runner.debug.xcconfig"; sourceTree = ""; }; E65E8B38E3E3EC1E0351A6F2 /* Pods-RunnerTests.release.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-RunnerTests.release.xcconfig"; path = "Target Support Files/Pods-RunnerTests/Pods-RunnerTests.release.xcconfig"; sourceTree = ""; }; E6FCCA409E121E10B5878E69 /* Pods-RunnerTests.profile.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-RunnerTests.profile.xcconfig"; path = "Target Support Files/Pods-RunnerTests/Pods-RunnerTests.profile.xcconfig"; sourceTree = ""; }; /* End PBXFileReference section */ /* Begin PBXFrameworksBuildPhase section */ 97C146EB1CF9000F007C117D /* Frameworks */ = { isa = PBXFrameworksBuildPhase; buildActionMask = 2147483647; files = ( CA39E55412833DBA2A92AB4E /* Pods_Runner.framework in Frameworks */, ); runOnlyForDeploymentPostprocessing = 0; }; A9E43356DD9F64ADD0BD1417 /* Frameworks */ = { isa = PBXFrameworksBuildPhase; buildActionMask = 2147483647; files = ( 711D2FFCE0FE40B34BED4AB3 /* Pods_RunnerTests.framework in Frameworks */, ); runOnlyForDeploymentPostprocessing = 0; }; /* End PBXFrameworksBuildPhase section */ /* Begin PBXGroup section */ 331C8082294A63A400263BE5 /* RunnerTests */ = { isa = PBXGroup; children = ( 331C807B294A618700263BE5 /* RunnerTests.swift */, ); path = RunnerTests; sourceTree = ""; }; 630333982710EBFF5EEEF8EB /* Pods */ = { isa = PBXGroup; children = ( C86C8F4A02C79AC1A7BF6F09 /* Pods-Runner.debug.xcconfig */, AAB04AD19FC40D502B31E905 /* Pods-Runner.release.xcconfig */, 5EB49DBBBD50A5533BF1F17B /* Pods-Runner.profile.xcconfig */, 08CD5310A37D5E4C76389779 /* Pods-RunnerTests.debug.xcconfig */, E65E8B38E3E3EC1E0351A6F2 /* Pods-RunnerTests.release.xcconfig */, E6FCCA409E121E10B5878E69 /* Pods-RunnerTests.profile.xcconfig */, ); path = Pods; sourceTree = ""; }; 9740EEB11CF90186004384FC /* Flutter */ = { isa = PBXGroup; children = ( 3B3967151E833CAA004F5970 /* AppFrameworkInfo.plist */, 9740EEB21CF90195004384FC /* Debug.xcconfig */, 7AFA3C8E1D35360C0083082E /* Release.xcconfig */, 9740EEB31CF90195004384FC /* Generated.xcconfig */, ); name = Flutter; sourceTree = ""; }; 97C146E51CF9000F007C117D = { isa = PBXGroup; children = ( 9740EEB11CF90186004384FC /* Flutter */, 97C146F01CF9000F007C117D /* Runner */, 97C146EF1CF9000F007C117D /* Products */, 331C8082294A63A400263BE5 /* RunnerTests */, 630333982710EBFF5EEEF8EB /* Pods */, BDAF68A17B0E5205A5813592 /* 
Frameworks */, ); sourceTree = ""; }; 97C146EF1CF9000F007C117D /* Products */ = { isa = PBXGroup; children = ( 97C146EE1CF9000F007C117D /* Runner.app */, 331C8081294A63A400263BE5 /* RunnerTests.xctest */, ); name = Products; sourceTree = ""; }; 97C146F01CF9000F007C117D /* Runner */ = { isa = PBXGroup; children = ( 97C146FA1CF9000F007C117D /* Main.storyboard */, 97C146FD1CF9000F007C117D /* Assets.xcassets */, 97C146FF1CF9000F007C117D /* LaunchScreen.storyboard */, 97C147021CF9000F007C117D /* Info.plist */, 1498D2321E8E86230040F4C2 /* GeneratedPluginRegistrant.h */, 1498D2331E8E89220040F4C2 /* GeneratedPluginRegistrant.m */, 74858FAE1ED2DC5600515810 /* AppDelegate.swift */, 74858FAD1ED2DC5600515810 /* Runner-Bridging-Header.h */, ); path = Runner; sourceTree = ""; }; BDAF68A17B0E5205A5813592 /* Frameworks */ = { isa = PBXGroup; children = ( 9E1187B4A5C75EEA06C9F271 /* Pods_Runner.framework */, 3CB28A3E128E29A157E09664 /* Pods_RunnerTests.framework */, ); name = Frameworks; sourceTree = ""; }; /* End PBXGroup section */ /* Begin PBXNativeTarget section */ 331C8080294A63A400263BE5 /* RunnerTests */ = { isa = PBXNativeTarget; buildConfigurationList = 331C8087294A63A400263BE5 /* Build configuration list for PBXNativeTarget "RunnerTests" */; buildPhases = ( 3A5484AA9983D77D1D18672C /* [CP] Check Pods Manifest.lock */, 331C807D294A63A400263BE5 /* Sources */, 331C807F294A63A400263BE5 /* Resources */, A9E43356DD9F64ADD0BD1417 /* Frameworks */, ); buildRules = ( ); dependencies = ( 331C8086294A63A400263BE5 /* PBXTargetDependency */, ); name = RunnerTests; productName = RunnerTests; productReference = 331C8081294A63A400263BE5 /* RunnerTests.xctest */; productType = "com.apple.product-type.bundle.unit-test"; }; 97C146ED1CF9000F007C117D /* Runner */ = { isa = PBXNativeTarget; buildConfigurationList = 97C147051CF9000F007C117D /* Build configuration list for PBXNativeTarget "Runner" */; buildPhases = ( E4B47E4D1D6EF101684F156C /* [CP] Check Pods Manifest.lock */, 9740EEB61CF901F6004384FC /* Run Script */, 97C146EA1CF9000F007C117D /* Sources */, 97C146EB1CF9000F007C117D /* Frameworks */, 97C146EC1CF9000F007C117D /* Resources */, 9705A1C41CF9048500538489 /* Embed Frameworks */, 3B06AD1E1E4923F5004D2608 /* Thin Binary */, 399757031974BA9421307DCF /* [CP] Embed Pods Frameworks */, ); buildRules = ( ); dependencies = ( ); name = Runner; productName = Runner; productReference = 97C146EE1CF9000F007C117D /* Runner.app */; productType = "com.apple.product-type.application"; }; /* End PBXNativeTarget section */ /* Begin PBXProject section */ 97C146E61CF9000F007C117D /* Project object */ = { isa = PBXProject; attributes = { BuildIndependentTargetsInParallel = YES; LastUpgradeCheck = 1510; ORGANIZATIONNAME = ""; TargetAttributes = { 331C8080294A63A400263BE5 = { CreatedOnToolsVersion = 14.0; TestTargetID = 97C146ED1CF9000F007C117D; }; 97C146ED1CF9000F007C117D = { CreatedOnToolsVersion = 7.3.1; LastSwiftMigration = 1100; }; }; }; buildConfigurationList = 97C146E91CF9000F007C117D /* Build configuration list for PBXProject "Runner" */; compatibilityVersion = "Xcode 9.3"; developmentRegion = en; hasScannedForEncodings = 0; knownRegions = ( en, Base, ); mainGroup = 97C146E51CF9000F007C117D; productRefGroup = 97C146EF1CF9000F007C117D /* Products */; projectDirPath = ""; projectRoot = ""; targets = ( 97C146ED1CF9000F007C117D /* Runner */, 331C8080294A63A400263BE5 /* RunnerTests */, ); }; /* End PBXProject section */ /* Begin PBXResourcesBuildPhase section */ 331C807F294A63A400263BE5 /* Resources */ = { isa = 
PBXResourcesBuildPhase; buildActionMask = 2147483647; files = ( ); runOnlyForDeploymentPostprocessing = 0; }; 97C146EC1CF9000F007C117D /* Resources */ = { isa = PBXResourcesBuildPhase; buildActionMask = 2147483647; files = ( 97C147011CF9000F007C117D /* LaunchScreen.storyboard in Resources */, 3B3967161E833CAA004F5970 /* AppFrameworkInfo.plist in Resources */, 97C146FE1CF9000F007C117D /* Assets.xcassets in Resources */, 97C146FC1CF9000F007C117D /* Main.storyboard in Resources */, ); runOnlyForDeploymentPostprocessing = 0; }; /* End PBXResourcesBuildPhase section */ /* Begin PBXShellScriptBuildPhase section */ 399757031974BA9421307DCF /* [CP] Embed Pods Frameworks */ = { isa = PBXShellScriptBuildPhase; buildActionMask = 2147483647; files = ( ); inputFileListPaths = ( "${PODS_ROOT}/Target Support Files/Pods-Runner/Pods-Runner-frameworks-${CONFIGURATION}-input-files.xcfilelist", ); name = "[CP] Embed Pods Frameworks"; outputFileListPaths = ( "${PODS_ROOT}/Target Support Files/Pods-Runner/Pods-Runner-frameworks-${CONFIGURATION}-output-files.xcfilelist", ); runOnlyForDeploymentPostprocessing = 0; shellPath = /bin/sh; shellScript = "\"${PODS_ROOT}/Target Support Files/Pods-Runner/Pods-Runner-frameworks.sh\"\n"; showEnvVarsInLog = 0; }; 3A5484AA9983D77D1D18672C /* [CP] Check Pods Manifest.lock */ = { isa = PBXShellScriptBuildPhase; buildActionMask = 2147483647; files = ( ); inputFileListPaths = ( ); inputPaths = ( "${PODS_PODFILE_DIR_PATH}/Podfile.lock", "${PODS_ROOT}/Manifest.lock", ); name = "[CP] Check Pods Manifest.lock"; outputFileListPaths = ( ); outputPaths = ( "$(DERIVED_FILE_DIR)/Pods-RunnerTests-checkManifestLockResult.txt", ); runOnlyForDeploymentPostprocessing = 0; shellPath = /bin/sh; shellScript = "diff \"${PODS_PODFILE_DIR_PATH}/Podfile.lock\" \"${PODS_ROOT}/Manifest.lock\" > /dev/null\nif [ $? != 0 ] ; then\n # print error to STDERR\n echo \"error: The sandbox is not in sync with the Podfile.lock. Run 'pod install' or update your CocoaPods installation.\" >&2\n exit 1\nfi\n# This output is used by Xcode 'outputs' to avoid re-running this script phase.\necho \"SUCCESS\" > \"${SCRIPT_OUTPUT_FILE_0}\"\n"; showEnvVarsInLog = 0; }; 3B06AD1E1E4923F5004D2608 /* Thin Binary */ = { isa = PBXShellScriptBuildPhase; alwaysOutOfDate = 1; buildActionMask = 2147483647; files = ( ); inputPaths = ( "${TARGET_BUILD_DIR}/${INFOPLIST_PATH}", ); name = "Thin Binary"; outputPaths = ( ); runOnlyForDeploymentPostprocessing = 0; shellPath = /bin/sh; shellScript = "/bin/sh \"$FLUTTER_ROOT/packages/flutter_tools/bin/xcode_backend.sh\" embed_and_thin"; }; 9740EEB61CF901F6004384FC /* Run Script */ = { isa = PBXShellScriptBuildPhase; alwaysOutOfDate = 1; buildActionMask = 2147483647; files = ( ); inputPaths = ( ); name = "Run Script"; outputPaths = ( ); runOnlyForDeploymentPostprocessing = 0; shellPath = /bin/sh; shellScript = "/bin/sh \"$FLUTTER_ROOT/packages/flutter_tools/bin/xcode_backend.sh\" build"; }; E4B47E4D1D6EF101684F156C /* [CP] Check Pods Manifest.lock */ = { isa = PBXShellScriptBuildPhase; buildActionMask = 2147483647; files = ( ); inputFileListPaths = ( ); inputPaths = ( "${PODS_PODFILE_DIR_PATH}/Podfile.lock", "${PODS_ROOT}/Manifest.lock", ); name = "[CP] Check Pods Manifest.lock"; outputFileListPaths = ( ); outputPaths = ( "$(DERIVED_FILE_DIR)/Pods-Runner-checkManifestLockResult.txt", ); runOnlyForDeploymentPostprocessing = 0; shellPath = /bin/sh; shellScript = "diff \"${PODS_PODFILE_DIR_PATH}/Podfile.lock\" \"${PODS_ROOT}/Manifest.lock\" > /dev/null\nif [ $? 
!= 0 ] ; then\n # print error to STDERR\n echo \"error: The sandbox is not in sync with the Podfile.lock. Run 'pod install' or update your CocoaPods installation.\" >&2\n exit 1\nfi\n# This output is used by Xcode 'outputs' to avoid re-running this script phase.\necho \"SUCCESS\" > \"${SCRIPT_OUTPUT_FILE_0}\"\n"; showEnvVarsInLog = 0; }; /* End PBXShellScriptBuildPhase section */ /* Begin PBXSourcesBuildPhase section */ 331C807D294A63A400263BE5 /* Sources */ = { isa = PBXSourcesBuildPhase; buildActionMask = 2147483647; files = ( 331C808B294A63AB00263BE5 /* RunnerTests.swift in Sources */, ); runOnlyForDeploymentPostprocessing = 0; }; 97C146EA1CF9000F007C117D /* Sources */ = { isa = PBXSourcesBuildPhase; buildActionMask = 2147483647; files = ( 74858FAF1ED2DC5600515810 /* AppDelegate.swift in Sources */, 1498D2341E8E89220040F4C2 /* GeneratedPluginRegistrant.m in Sources */, ); runOnlyForDeploymentPostprocessing = 0; }; /* End PBXSourcesBuildPhase section */ /* Begin PBXTargetDependency section */ 331C8086294A63A400263BE5 /* PBXTargetDependency */ = { isa = PBXTargetDependency; target = 97C146ED1CF9000F007C117D /* Runner */; targetProxy = 331C8085294A63A400263BE5 /* PBXContainerItemProxy */; }; /* End PBXTargetDependency section */ /* Begin PBXVariantGroup section */ 97C146FA1CF9000F007C117D /* Main.storyboard */ = { isa = PBXVariantGroup; children = ( 97C146FB1CF9000F007C117D /* Base */, ); name = Main.storyboard; sourceTree = ""; }; 97C146FF1CF9000F007C117D /* LaunchScreen.storyboard */ = { isa = PBXVariantGroup; children = ( 97C147001CF9000F007C117D /* Base */, ); name = LaunchScreen.storyboard; sourceTree = ""; }; /* End PBXVariantGroup section */ /* Begin XCBuildConfiguration section */ 249021D3217E4FDB00AE95B9 /* Profile */ = { isa = XCBuildConfiguration; buildSettings = { ALWAYS_SEARCH_USER_PATHS = NO; ASSETCATALOG_COMPILER_GENERATE_SWIFT_ASSET_SYMBOL_EXTENSIONS = YES; CLANG_ANALYZER_NONNULL = YES; CLANG_CXX_LANGUAGE_STANDARD = "gnu++0x"; CLANG_CXX_LIBRARY = "libc++"; CLANG_ENABLE_MODULES = YES; CLANG_ENABLE_OBJC_ARC = YES; CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES; CLANG_WARN_BOOL_CONVERSION = YES; CLANG_WARN_COMMA = YES; CLANG_WARN_CONSTANT_CONVERSION = YES; CLANG_WARN_DEPRECATED_OBJC_IMPLEMENTATIONS = YES; CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR; CLANG_WARN_EMPTY_BODY = YES; CLANG_WARN_ENUM_CONVERSION = YES; CLANG_WARN_INFINITE_RECURSION = YES; CLANG_WARN_INT_CONVERSION = YES; CLANG_WARN_NON_LITERAL_NULL_CONVERSION = YES; CLANG_WARN_OBJC_IMPLICIT_RETAIN_SELF = YES; CLANG_WARN_OBJC_LITERAL_CONVERSION = YES; CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR; CLANG_WARN_RANGE_LOOP_ANALYSIS = YES; CLANG_WARN_STRICT_PROTOTYPES = YES; CLANG_WARN_SUSPICIOUS_MOVE = YES; CLANG_WARN_UNREACHABLE_CODE = YES; CLANG_WARN__DUPLICATE_METHOD_MATCH = YES; "CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "iPhone Developer"; COPY_PHASE_STRIP = NO; DEBUG_INFORMATION_FORMAT = "dwarf-with-dsym"; ENABLE_NS_ASSERTIONS = NO; ENABLE_STRICT_OBJC_MSGSEND = YES; ENABLE_USER_SCRIPT_SANDBOXING = NO; GCC_C_LANGUAGE_STANDARD = gnu99; GCC_NO_COMMON_BLOCKS = YES; GCC_WARN_64_TO_32_BIT_CONVERSION = YES; GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR; GCC_WARN_UNDECLARED_SELECTOR = YES; GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE; GCC_WARN_UNUSED_FUNCTION = YES; GCC_WARN_UNUSED_VARIABLE = YES; IPHONEOS_DEPLOYMENT_TARGET = 13.0; MTL_ENABLE_DEBUG_INFO = NO; SDKROOT = iphoneos; SUPPORTED_PLATFORMS = iphoneos; TARGETED_DEVICE_FAMILY = "1,2"; VALIDATE_PRODUCT = YES; }; name = Profile; }; 249021D4217E4FDB00AE95B9 /* Profile */ = { isa = 
XCBuildConfiguration; baseConfigurationReference = 7AFA3C8E1D35360C0083082E /* Release.xcconfig */; buildSettings = { ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon; CLANG_ENABLE_MODULES = YES; CURRENT_PROJECT_VERSION = "$(FLUTTER_BUILD_NUMBER)"; DEVELOPMENT_TEAM = QNW744Q794; ENABLE_BITCODE = NO; INFOPLIST_FILE = Runner/Info.plist; LD_RUNPATH_SEARCH_PATHS = ( "$(inherited)", "@executable_path/Frameworks", ); PRODUCT_BUNDLE_IDENTIFIER = com.example.eviExample; PRODUCT_NAME = "$(TARGET_NAME)"; SWIFT_OBJC_BRIDGING_HEADER = "Runner/Runner-Bridging-Header.h"; SWIFT_VERSION = 5.0; VERSIONING_SYSTEM = "apple-generic"; }; name = Profile; }; 331C8088294A63A400263BE5 /* Debug */ = { isa = XCBuildConfiguration; baseConfigurationReference = 08CD5310A37D5E4C76389779 /* Pods-RunnerTests.debug.xcconfig */; buildSettings = { BUNDLE_LOADER = "$(TEST_HOST)"; CODE_SIGN_STYLE = Automatic; CURRENT_PROJECT_VERSION = 1; GENERATE_INFOPLIST_FILE = YES; MARKETING_VERSION = 1.0; PRODUCT_BUNDLE_IDENTIFIER = com.example.eviExample.RunnerTests; PRODUCT_NAME = "$(TARGET_NAME)"; SWIFT_ACTIVE_COMPILATION_CONDITIONS = DEBUG; SWIFT_OPTIMIZATION_LEVEL = "-Onone"; SWIFT_VERSION = 5.0; TEST_HOST = "$(BUILT_PRODUCTS_DIR)/Runner.app/$(BUNDLE_EXECUTABLE_FOLDER_PATH)/Runner"; }; name = Debug; }; 331C8089294A63A400263BE5 /* Release */ = { isa = XCBuildConfiguration; baseConfigurationReference = E65E8B38E3E3EC1E0351A6F2 /* Pods-RunnerTests.release.xcconfig */; buildSettings = { BUNDLE_LOADER = "$(TEST_HOST)"; CODE_SIGN_STYLE = Automatic; CURRENT_PROJECT_VERSION = 1; GENERATE_INFOPLIST_FILE = YES; MARKETING_VERSION = 1.0; PRODUCT_BUNDLE_IDENTIFIER = com.example.eviExample.RunnerTests; PRODUCT_NAME = "$(TARGET_NAME)"; SWIFT_VERSION = 5.0; TEST_HOST = "$(BUILT_PRODUCTS_DIR)/Runner.app/$(BUNDLE_EXECUTABLE_FOLDER_PATH)/Runner"; }; name = Release; }; 331C808A294A63A400263BE5 /* Profile */ = { isa = XCBuildConfiguration; baseConfigurationReference = E6FCCA409E121E10B5878E69 /* Pods-RunnerTests.profile.xcconfig */; buildSettings = { BUNDLE_LOADER = "$(TEST_HOST)"; CODE_SIGN_STYLE = Automatic; CURRENT_PROJECT_VERSION = 1; GENERATE_INFOPLIST_FILE = YES; MARKETING_VERSION = 1.0; PRODUCT_BUNDLE_IDENTIFIER = com.example.eviExample.RunnerTests; PRODUCT_NAME = "$(TARGET_NAME)"; SWIFT_VERSION = 5.0; TEST_HOST = "$(BUILT_PRODUCTS_DIR)/Runner.app/$(BUNDLE_EXECUTABLE_FOLDER_PATH)/Runner"; }; name = Profile; }; 97C147031CF9000F007C117D /* Debug */ = { isa = XCBuildConfiguration; buildSettings = { ALWAYS_SEARCH_USER_PATHS = NO; ASSETCATALOG_COMPILER_GENERATE_SWIFT_ASSET_SYMBOL_EXTENSIONS = YES; CLANG_ANALYZER_NONNULL = YES; CLANG_CXX_LANGUAGE_STANDARD = "gnu++0x"; CLANG_CXX_LIBRARY = "libc++"; CLANG_ENABLE_MODULES = YES; CLANG_ENABLE_OBJC_ARC = YES; CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES; CLANG_WARN_BOOL_CONVERSION = YES; CLANG_WARN_COMMA = YES; CLANG_WARN_CONSTANT_CONVERSION = YES; CLANG_WARN_DEPRECATED_OBJC_IMPLEMENTATIONS = YES; CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR; CLANG_WARN_EMPTY_BODY = YES; CLANG_WARN_ENUM_CONVERSION = YES; CLANG_WARN_INFINITE_RECURSION = YES; CLANG_WARN_INT_CONVERSION = YES; CLANG_WARN_NON_LITERAL_NULL_CONVERSION = YES; CLANG_WARN_OBJC_IMPLICIT_RETAIN_SELF = YES; CLANG_WARN_OBJC_LITERAL_CONVERSION = YES; CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR; CLANG_WARN_RANGE_LOOP_ANALYSIS = YES; CLANG_WARN_STRICT_PROTOTYPES = YES; CLANG_WARN_SUSPICIOUS_MOVE = YES; CLANG_WARN_UNREACHABLE_CODE = YES; CLANG_WARN__DUPLICATE_METHOD_MATCH = YES; "CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "iPhone Developer"; COPY_PHASE_STRIP = NO; 
DEBUG_INFORMATION_FORMAT = dwarf; ENABLE_STRICT_OBJC_MSGSEND = YES; ENABLE_TESTABILITY = YES; ENABLE_USER_SCRIPT_SANDBOXING = NO; GCC_C_LANGUAGE_STANDARD = gnu99; GCC_DYNAMIC_NO_PIC = NO; GCC_NO_COMMON_BLOCKS = YES; GCC_OPTIMIZATION_LEVEL = 0; GCC_PREPROCESSOR_DEFINITIONS = ( "DEBUG=1", "$(inherited)", ); GCC_WARN_64_TO_32_BIT_CONVERSION = YES; GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR; GCC_WARN_UNDECLARED_SELECTOR = YES; GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE; GCC_WARN_UNUSED_FUNCTION = YES; GCC_WARN_UNUSED_VARIABLE = YES; IPHONEOS_DEPLOYMENT_TARGET = 13.0; MTL_ENABLE_DEBUG_INFO = YES; ONLY_ACTIVE_ARCH = YES; SDKROOT = iphoneos; TARGETED_DEVICE_FAMILY = "1,2"; }; name = Debug; }; 97C147041CF9000F007C117D /* Release */ = { isa = XCBuildConfiguration; buildSettings = { ALWAYS_SEARCH_USER_PATHS = NO; ASSETCATALOG_COMPILER_GENERATE_SWIFT_ASSET_SYMBOL_EXTENSIONS = YES; CLANG_ANALYZER_NONNULL = YES; CLANG_CXX_LANGUAGE_STANDARD = "gnu++0x"; CLANG_CXX_LIBRARY = "libc++"; CLANG_ENABLE_MODULES = YES; CLANG_ENABLE_OBJC_ARC = YES; CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES; CLANG_WARN_BOOL_CONVERSION = YES; CLANG_WARN_COMMA = YES; CLANG_WARN_CONSTANT_CONVERSION = YES; CLANG_WARN_DEPRECATED_OBJC_IMPLEMENTATIONS = YES; CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR; CLANG_WARN_EMPTY_BODY = YES; CLANG_WARN_ENUM_CONVERSION = YES; CLANG_WARN_INFINITE_RECURSION = YES; CLANG_WARN_INT_CONVERSION = YES; CLANG_WARN_NON_LITERAL_NULL_CONVERSION = YES; CLANG_WARN_OBJC_IMPLICIT_RETAIN_SELF = YES; CLANG_WARN_OBJC_LITERAL_CONVERSION = YES; CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR; CLANG_WARN_RANGE_LOOP_ANALYSIS = YES; CLANG_WARN_STRICT_PROTOTYPES = YES; CLANG_WARN_SUSPICIOUS_MOVE = YES; CLANG_WARN_UNREACHABLE_CODE = YES; CLANG_WARN__DUPLICATE_METHOD_MATCH = YES; "CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "iPhone Developer"; COPY_PHASE_STRIP = NO; DEBUG_INFORMATION_FORMAT = "dwarf-with-dsym"; ENABLE_NS_ASSERTIONS = NO; ENABLE_STRICT_OBJC_MSGSEND = YES; ENABLE_USER_SCRIPT_SANDBOXING = NO; GCC_C_LANGUAGE_STANDARD = gnu99; GCC_NO_COMMON_BLOCKS = YES; GCC_WARN_64_TO_32_BIT_CONVERSION = YES; GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR; GCC_WARN_UNDECLARED_SELECTOR = YES; GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE; GCC_WARN_UNUSED_FUNCTION = YES; GCC_WARN_UNUSED_VARIABLE = YES; IPHONEOS_DEPLOYMENT_TARGET = 13.0; MTL_ENABLE_DEBUG_INFO = NO; SDKROOT = iphoneos; SUPPORTED_PLATFORMS = iphoneos; SWIFT_COMPILATION_MODE = wholemodule; SWIFT_OPTIMIZATION_LEVEL = "-O"; TARGETED_DEVICE_FAMILY = "1,2"; VALIDATE_PRODUCT = YES; }; name = Release; }; 97C147061CF9000F007C117D /* Debug */ = { isa = XCBuildConfiguration; baseConfigurationReference = 9740EEB21CF90195004384FC /* Debug.xcconfig */; buildSettings = { ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon; CLANG_ENABLE_MODULES = YES; CURRENT_PROJECT_VERSION = "$(FLUTTER_BUILD_NUMBER)"; DEVELOPMENT_TEAM = QNW744Q794; ENABLE_BITCODE = NO; INFOPLIST_FILE = Runner/Info.plist; LD_RUNPATH_SEARCH_PATHS = ( "$(inherited)", "@executable_path/Frameworks", ); PRODUCT_BUNDLE_IDENTIFIER = com.example.eviExample; PRODUCT_NAME = "$(TARGET_NAME)"; SWIFT_OBJC_BRIDGING_HEADER = "Runner/Runner-Bridging-Header.h"; SWIFT_OPTIMIZATION_LEVEL = "-Onone"; SWIFT_VERSION = 5.0; VERSIONING_SYSTEM = "apple-generic"; }; name = Debug; }; 97C147071CF9000F007C117D /* Release */ = { isa = XCBuildConfiguration; baseConfigurationReference = 7AFA3C8E1D35360C0083082E /* Release.xcconfig */; buildSettings = { ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon; CLANG_ENABLE_MODULES = YES; CURRENT_PROJECT_VERSION = 
"$(FLUTTER_BUILD_NUMBER)"; DEVELOPMENT_TEAM = QNW744Q794; ENABLE_BITCODE = NO; INFOPLIST_FILE = Runner/Info.plist; LD_RUNPATH_SEARCH_PATHS = ( "$(inherited)", "@executable_path/Frameworks", ); PRODUCT_BUNDLE_IDENTIFIER = com.example.eviExample; PRODUCT_NAME = "$(TARGET_NAME)"; SWIFT_OBJC_BRIDGING_HEADER = "Runner/Runner-Bridging-Header.h"; SWIFT_VERSION = 5.0; VERSIONING_SYSTEM = "apple-generic"; }; name = Release; }; /* End XCBuildConfiguration section */ /* Begin XCConfigurationList section */ 331C8087294A63A400263BE5 /* Build configuration list for PBXNativeTarget "RunnerTests" */ = { isa = XCConfigurationList; buildConfigurations = ( 331C8088294A63A400263BE5 /* Debug */, 331C8089294A63A400263BE5 /* Release */, 331C808A294A63A400263BE5 /* Profile */, ); defaultConfigurationIsVisible = 0; defaultConfigurationName = Release; }; 97C146E91CF9000F007C117D /* Build configuration list for PBXProject "Runner" */ = { isa = XCConfigurationList; buildConfigurations = ( 97C147031CF9000F007C117D /* Debug */, 97C147041CF9000F007C117D /* Release */, 249021D3217E4FDB00AE95B9 /* Profile */, ); defaultConfigurationIsVisible = 0; defaultConfigurationName = Release; }; 97C147051CF9000F007C117D /* Build configuration list for PBXNativeTarget "Runner" */ = { isa = XCConfigurationList; buildConfigurations = ( 97C147061CF9000F007C117D /* Debug */, 97C147071CF9000F007C117D /* Release */, 249021D4217E4FDB00AE95B9 /* Profile */, ); defaultConfigurationIsVisible = 0; defaultConfigurationName = Release; }; /* End XCConfigurationList section */ }; rootObject = 97C146E61CF9000F007C117D /* Project object */; } ================================================ FILE: evi/evi-flutter/ios/Runner.xcodeproj/project.xcworkspace/contents.xcworkspacedata ================================================ ================================================ FILE: evi/evi-flutter/ios/Runner.xcodeproj/project.xcworkspace/xcshareddata/IDEWorkspaceChecks.plist ================================================ IDEDidComputeMac32BitWarning ================================================ FILE: evi/evi-flutter/ios/Runner.xcodeproj/project.xcworkspace/xcshareddata/WorkspaceSettings.xcsettings ================================================ PreviewsEnabled ================================================ FILE: evi/evi-flutter/ios/Runner.xcodeproj/xcshareddata/xcschemes/Runner.xcscheme ================================================ ================================================ FILE: evi/evi-flutter/ios/Runner.xcworkspace/contents.xcworkspacedata ================================================ ================================================ FILE: evi/evi-flutter/ios/Runner.xcworkspace/xcshareddata/IDEWorkspaceChecks.plist ================================================ IDEDidComputeMac32BitWarning ================================================ FILE: evi/evi-flutter/ios/Runner.xcworkspace/xcshareddata/WorkspaceSettings.xcsettings ================================================ PreviewsEnabled ================================================ FILE: evi/evi-flutter/ios/RunnerTests/RunnerTests.swift ================================================ import Flutter import UIKit import XCTest class RunnerTests: XCTestCase { func testExample() { // If you add code to the Runner application, consider adding tests here. // See https://developer.apple.com/documentation/xctest for more information about using XCTest. 
} } ================================================ FILE: evi/evi-flutter/ios/build/ios/XCBuildData/PIFCache/workspace/WORKSPACE@v11_hash=(null)_subobjects=4483c0dac1d2a63621e8a5d74e580a19-json ================================================ {"guid":"dc4b70c03e8043e50e38f2068887b1d4","name":"Pods","path":"/Users/twitchard/dev/hume-api-examples/evi-flutter-example/ios/Pods/Pods.xcodeproj/project.xcworkspace","projects":["PROJECT@v11_mod=1737593281.485423_hash=bfdfe7dc352907fc980b868725387e98plugins=1OJSG6M1FOV3XYQCBH7Z29RZ0FPR9XDE1"]} ================================================
FILE: evi/evi-flutter/lib/chat_card.dart
================================================
import 'dart:core';

import 'package:flutter/material.dart';

import 'theme.dart';

enum Role { user, assistant }

class Score {
  final String emotion;
  final double score;

  Score({required this.emotion, required this.score});

  Map<String, dynamic> toJson() {
    return {
      'emotion': emotion,
      'score': score,
    };
  }
}

class ChatEntry {
  final Role role;
  final String timestamp;
  final String content;
  final List<Score> scores;

  ChatEntry(
      {required this.role,
      required this.timestamp,
      required this.content,
      required this.scores});
}

class ChatCard extends StatelessWidget {
  final ChatEntry message;

  const ChatCard({super.key, required this.message});

  @override
  Widget build(BuildContext context) {
    final alignment = message.role == Role.user
        ? CrossAxisAlignment.end
        : CrossAxisAlignment.start;
    return Padding(
      padding: const EdgeInsets.symmetric(vertical: 8.0, horizontal: 16.0),
      child: Align(
        alignment: message.role == Role.user
            ? Alignment.centerRight
            : Alignment.centerLeft,
        child: Card(
          elevation: 2,
          color: message.role == Role.user ? accentBlue200 : white,
          shape: RoundedRectangleBorder(
            borderRadius: BorderRadius.circular(8.0),
          ),
          child: Padding(
            padding: const EdgeInsets.all(12.0),
            child: Column(
              crossAxisAlignment: alignment,
              children: [
                Text(
                  message.content,
                  style: TextStyle(fontSize: 16),
                ),
                const SizedBox(height: 8),
                Text(
                  message.scores
                      .map((score) =>
                          "${score.emotion} (${score.score.toStringAsFixed(1)})")
                      .join(", "),
                  style: TextStyle(
                    fontSize: 12,
                    color: Colors.grey[600],
                  ),
                ),
              ],
            ),
          ),
        ),
      ),
    );
  }
}

class ChatDisplay extends StatelessWidget {
  final List<ChatEntry> entries;

  const ChatDisplay({super.key, required this.entries});

  @override
  Widget build(BuildContext context) {
    return Padding(
      padding: const EdgeInsets.all(16.0),
      child: ListView.builder(
        itemCount: entries.length,
        itemBuilder: (context, index) {
          return ChatCard(message: entries[index]);
        },
      ),
    );
  }
}

================================================
FILE: evi/evi-flutter/lib/evi_message.dart
================================================
import 'dart:convert';

// Represents an incoming message sent from the /v0/evi/chat websocket endpoint of
// the Hume API. This example includes only messages and properties that are used
// in the example. You should add more messages and properties to this datatype
// as needed.
// See https://hume.docs.buildwithfern.com/reference/empathic-voice-interface-evi/chat/chat#receive
// for the full list of messages and their properties.
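// For example (a hypothetical sketch -- check the API reference above for the
// actual payload), supporting the `assistant_end` message that EVI sends when
// the assistant finishes a response would mean one more case in the factory
// below and one more subclass:
//
//   case 'assistant_end':
//     return AssistantEndMessage(json);
//
//   class AssistantEndMessage extends EviMessage {
//     AssistantEndMessage(json) : super._('assistant_end', json);
//   }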
//
// You can also use the Typescript SDK as a useful reference:
// https://github.com/HumeAI/hume-typescript-sdk/blob/da8820dfef2a30e0745a6ae86987b090a5ba0e6e/src/api/resources/empathicVoice/types/JsonMessage.ts#L7
sealed class EviMessage {
  final String type;
  final Map<String, dynamic> rawJson;

  EviMessage._(this.type, this.rawJson);

  factory EviMessage.decode(String text) {
    final json = jsonDecode(text) as Map<String, dynamic>;
    final type = json['type'] as String;
    switch (type) {
      case 'error':
        return ErrorMessage(json);
      case 'chat_metadata':
        return ChatMetadataMessage(json);
      case 'audio_output':
        return AudioOutputMessage(json);
      case 'user_interruption':
        return UserInterruptionMessage(json);
      case 'assistant_message':
        return AssistantMessage(json);
      case 'user_message':
        return UserMessage(json);
      default:
        return UnknownMessage(json);
    }
  }
}

class ErrorMessage extends EviMessage {
  final String message;
  ErrorMessage(json)
      : message = json['message'],
        super._('error', json);
}

class ChatMetadataMessage extends EviMessage {
  ChatMetadataMessage(json) : super._('chat_metadata', json);
}

class AudioOutputMessage extends EviMessage {
  final String data;
  AudioOutputMessage(json)
      : data = json['data'],
        super._('audio_output', json);
}

class UserInterruptionMessage extends EviMessage {
  UserInterruptionMessage(json) : super._('user_interruption', json);
}

class ChatMessage {
  final String role;
  final String content;
  ChatMessage(json)
      : role = json['role'],
        content = json['content'];
}

class ProsodyInference {
  final Map<String, double> scores;
  ProsodyInference(json) : scores = json['scores'].cast<String, double>();
}

class Inference {
  final ProsodyInference? prosody;
  // `prosody` may be absent on some messages, so guard against null.
  Inference(json)
      : prosody =
            json['prosody'] == null ? null : ProsodyInference(json['prosody']);
}

class AssistantMessage extends EviMessage {
  final ChatMessage message;
  final Inference models;
  AssistantMessage(json)
      : message = ChatMessage(json['message']),
        models = Inference(json['models']),
        super._('assistant_message', json);
}

class UserMessage extends EviMessage {
  final ChatMessage message;
  final Inference models;
  UserMessage(json)
      : message = ChatMessage(json['message']),
        models = Inference(json['models']),
        super._('user_message', json);
}

class UnknownMessage extends EviMessage {
  UnknownMessage(json) : super._(json['type'], json);
}

================================================
FILE: evi/evi-flutter/lib/main.dart
================================================
import 'dart:convert';

import 'package:flutter/material.dart';
import 'package:web_socket_channel/web_socket_channel.dart';
import 'package:http/http.dart' as http;
import 'package:flutter_dotenv/flutter_dotenv.dart';
import 'package:audio/audio.dart';

import 'theme.dart';
import 'chat_card.dart';
import 'evi_message.dart' as evi;

class ConfigManager {
  static final ConfigManager _instance = ConfigManager._internal();
  String humeApiKey = "";
  String humeAccessToken = "";
  late final String humeConfigId;

  ConfigManager._internal();

  static ConfigManager get instance => _instance;

  // WARNING! For development only. In production, the app should hit your own
  // backend server to get an access token, using "token authentication"
  // (see https://dev.hume.ai/docs/introduction/api-key#token-authentication)
  String fetchHumeApiKey() {
    return dotenv.env['HUME_API_KEY'] ?? "";
  }
""; } Future fetchAccessToken() async { // Make a get request to dotenv.env['MY_SERVER_URL'] to get the access token final authUrl = dotenv.env['MY_SERVER_AUTH_URL']; if (authUrl == null) { throw Exception('Please set MY_SERVER_AUTH_URL in your .env file'); } final url = Uri.parse(authUrl); final response = await http.get(url); if (response.statusCode == 200) { return jsonDecode(response.body)['access_token']; } else { throw Exception('Failed to load access token'); } } Future loadConfig() async { // Make sure to create a .env file in your root directory which mirrors the .env.example file // and add your API key and an optional EVI config ID. await dotenv.load(); // WARNING! For development only. humeApiKey = fetchHumeApiKey(); // Uncomment this to use an access token in production. // humeAccessToken = await fetchAccessToken(); humeConfigId = dotenv.env['HUME_CONFIG_ID'] ?? ''; } } void main() async { // Ensure Flutter binding is initialized before calling asynchronous operations WidgetsFlutterBinding.ensureInitialized(); // Load config in singleton await ConfigManager.instance.loadConfig(); runApp(MyApp()); } class MyApp extends StatelessWidget { const MyApp({super.key}); @override Widget build(BuildContext context) { if (ConfigManager.instance.humeApiKey.isEmpty && ConfigManager.instance.humeAccessToken.isEmpty) { return MaterialApp( title: 'Flutter with EVI', home: ErrorMessage( message: "Error: Please set your Hume API key in main.dart (or use fetchAccessToken)", ), theme: appTheme); } return MaterialApp( title: 'Flutter with EVI', home: MyHomePage(title: 'Flutter with EVI'), theme: appTheme, ); } static List extractTopThreeEmotions(evi.Inference models) { // extract emotion scores from the message final scores = models.prosody?.scores ?? {}; // convert the emotions object into an array of key-value pairs final scoresArray = scores.entries.toList(); // sort the array by the values in descending order scoresArray.sort((a, b) => b.value.compareTo(a.value)); // extract the top three emotions and convert them back to an object final topThreeEmotions = scoresArray.take(3).map((entry) { return Score(emotion: entry.key, score: entry.value); }).toList(); return topThreeEmotions; } } class ErrorMessage extends StatelessWidget { final String message; const ErrorMessage({super.key, required this.message}); @override Widget build(BuildContext context) { return Center( child: Text( message, style: Theme.of(context).textTheme.headlineLarge, ), ); } } class MyHomePage extends StatefulWidget { final String title; const MyHomePage({super.key, required this.title}); @override State createState() => _MyHomePageState(); } class _MyHomePageState extends State { // define config here for recorder final Audio _audio = Audio(); WebSocketChannel? _chatChannel; bool _isConnected = false; bool _isMuted = false; var chatEntries = []; // EVI sends back transcripts of both the user's speech and the assistants speech, along // with an analysis of the emotional content of the speech. This method takes // of a message from EVI, parses it into a `ChatMessage` type and adds it to `chatEntries` so // it can be displayed. void appendNewChatMessage(evi.ChatMessage chatMessage, evi.Inference models) { final role = chatMessage.role == 'assistant' ? 
    final role =
        chatMessage.role == 'assistant' ? Role.assistant : Role.user;
    final entry = ChatEntry(
        role: role,
        timestamp: DateTime.now().toString(),
        content: chatMessage.content,
        scores: MyApp.extractTopThreeEmotions(models));
    setState(() {
      chatEntries.add(entry);
    });
  }

  @override
  Widget build(BuildContext context) {
    final muteButton = _isMuted
        ? ElevatedButton(
            onPressed: _unmuteInput,
            child: const Text('Unmute'),
          )
        : ElevatedButton(
            onPressed: _muteInput,
            child: const Text('Mute'),
          );
    final connectButton = _isConnected
        ? ElevatedButton(
            onPressed: _disconnect,
            child: const Text('Disconnect'),
          )
        : ElevatedButton(
            onPressed: _connect,
            child: const Text('Connect'),
          );
    return Scaffold(
      appBar: AppBar(
        backgroundColor: Theme.of(context).colorScheme.inversePrimary,
        title: Text(widget.title),
      ),
      body: Center(
          child: ConstrainedBox(
              constraints: BoxConstraints(maxWidth: 600),
              child: Column(
                  mainAxisAlignment: MainAxisAlignment.center,
                  children: [
                    Text(
                      'You are ${_isConnected ? 'connected' : 'disconnected'}',
                      style: const TextStyle(
                          fontSize: 18, fontWeight: FontWeight.bold),
                    ),
                    Expanded(child: ChatDisplay(entries: chatEntries)),
                    Padding(
                        padding: const EdgeInsets.all(8.0),
                        child: Row(
                            mainAxisAlignment: MainAxisAlignment.spaceEvenly,
                            children: [connectButton, muteButton]))
                  ]))),
    );
  }

  @override
  void dispose() {
    _audio.dispose();
    super.dispose();
  }

  // Opens a websocket connection to the EVI API and registers a listener to
  // handle incoming messages.
  void _connect() {
    setState(() {
      _isConnected = true;
    });

    if (ConfigManager.instance.humeApiKey.isNotEmpty &&
        ConfigManager.instance.humeAccessToken.isNotEmpty) {
      throw Exception(
          'Please use either an API key or an access token, not both');
    }
    var uri = 'wss://api.hume.ai/v0/evi/chat';
    if (ConfigManager.instance.humeAccessToken.isNotEmpty) {
      uri += '?access_token=${ConfigManager.instance.humeAccessToken}';
    } else if (ConfigManager.instance.humeApiKey.isNotEmpty) {
      uri += '?api_key=${ConfigManager.instance.humeApiKey}';
    } else {
      throw Exception('Please set your Hume API credentials in main.dart');
    }
    if (ConfigManager.instance.humeConfigId.isNotEmpty) {
      uri += "&config_id=${ConfigManager.instance.humeConfigId}";
    }
    _chatChannel = WebSocketChannel.connect(Uri.parse(uri));
    _chatChannel!.stream.listen(
      (event) async {
        final message = evi.EviMessage.decode(event);
        debugPrint("Received message: ${message.type}");
        switch (message) {
          case (evi.ErrorMessage errorMessage):
            debugPrint("Error: ${errorMessage.message}");
            break;
          case (evi.ChatMetadataMessage chatMetadataMessage):
            debugPrint("Chat metadata: ${chatMetadataMessage.rawJson}");
            _prepareAudioSettings();
            _startRecording();
            break;
          // This message contains audio data for playback.
          case (evi.AudioOutputMessage audioOutputMessage):
            _audio.enqueueAudio(audioOutputMessage.data);
            break;
          case (evi.UserInterruptionMessage _):
            _handleInterruption();
            break;
          // These messages contain the transcript text of the user's or the
          // assistant's speech as well as emotional analysis of the speech.
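          // (Hypothetical sketch of the shape this code expects -- see the
          // chat API reference linked in evi_message.dart for the
          // authoritative schema. A user_message payload looks roughly like:
          //   {"type": "user_message",
          //    "message": {"role": "user", "content": "Hello"},
          //    "models": {"prosody": {"scores": {"Joy": 0.8, ...}}}})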
          case (evi.AssistantMessage assistantMessage):
            appendNewChatMessage(
                assistantMessage.message, assistantMessage.models);
            break;
          case (evi.UserMessage userMessage):
            appendNewChatMessage(userMessage.message, userMessage.models);
            _handleInterruption();
            break;
          case (evi.UnknownMessage unknownMessage):
            debugPrint("Unknown message: ${unknownMessage.rawJson}");
            break;
        }
      },
      onError: (error) {
        debugPrint("Connection error: $error");
        _handleConnectionClosed();
      },
      onDone: () {
        debugPrint("Connection closed");
        _handleConnectionClosed();
      },
    );
    debugPrint("Connected");
  }

  void _disconnect() {
    _handleConnectionClosed();
    _handleInterruption();
    _chatChannel?.sink.close();
    debugPrint("Disconnected");
  }

  void _handleConnectionClosed() {
    setState(() {
      _isConnected = false;
    });
    _stopRecording();
  }

  void _handleInterruption() {
    _audio.stopPlayback();
  }

  void _muteInput() {
    _stopRecording();
    setState(() {
      _isMuted = true;
    });
  }

  void _prepareAudioSettings() {
    // set session settings to prepare EVI for receiving linear16 encoded audio
    // https://dev.hume.ai/docs/empathic-voice-interface-evi/configuration#session-settings
    _chatChannel!.sink.add(jsonEncode({
      'type': 'session_settings',
      'audio': {
        'encoding': 'linear16',
        'sample_rate': 48000,
        'channels': 1,
      },
    }));
  }

  void _sendAudio(String base64) {
    _chatChannel!.sink.add(jsonEncode({
      'type': 'audio_input',
      'data': base64,
    }));
  }

  void _startRecording() async {
    await _audio.startRecording();
    // Handle recorder errors on the same subscription that forwards audio.
    _audio.audioStream.listen((data) async {
      _sendAudio(data);
    }, onError: (error) {
      debugPrint("Error recording audio: $error");
    });
  }

  void _stopRecording() {
    _audio.stopRecording();
  }

  void _unmuteInput() {
    _startRecording();
    setState(() {
      _isMuted = false;
    });
  }
}

================================================
FILE: evi/evi-flutter/lib/theme.dart
================================================
import 'package:flutter/material.dart';

// From CSS variables on hume.ai
const Color white = Color.fromRGBO(255, 255, 255, 1);
const Color humeBlack900 = Color.fromRGBO(26, 26, 26, 1);
const Color humeTan400 = Color.fromRGBO(255, 244, 232, 1);
const Color accentOrange200 = Color.fromRGBO(255, 219, 176, 1);
const Color accentBlue200 = Color.fromRGBO(209, 226, 243, 1);

ThemeData appTheme = ThemeData(
  scaffoldBackgroundColor: humeTan400,
  colorScheme: ColorScheme.light(
    primary: white,
    inversePrimary: accentOrange200,
    surface: humeBlack900,
  ),
);

================================================
FILE: evi/evi-flutter/pubspec.yaml
================================================
name: evi_example
description: "A new Flutter project."
# The following line prevents the package from being accidentally published to
# pub.dev using `flutter pub publish`. This is preferred for private packages.
publish_to: 'none' # Remove this line if you wish to publish to pub.dev

# The following defines the version and build number for your application.
# A version number is three numbers separated by dots, like 1.2.43
# followed by an optional build number separated by a +.
# Both the version and the build number may be overridden in flutter
# build by specifying --build-name and --build-number, respectively.
# In Android, build-name is used as versionName while build-number is used as versionCode.
# Read more about Android versioning at https://developer.android.com/studio/publish/versioning
# In iOS, build-name is used as CFBundleShortVersionString while build-number is used as CFBundleVersion.
# Read more about iOS versioning at
# https://developer.apple.com/library/archive/documentation/General/Reference/InfoPlistKeyReference/Articles/CoreFoundationKeys.html
# In Windows, build-name is used as the major, minor, and patch parts
# of the product and file versions while build-number is used as the build suffix.
version: 1.0.0+1

environment:
  sdk: ^3.5.1

# Dependencies specify other packages that your package needs in order to work.
# To automatically upgrade your package dependencies to the latest versions
# consider running `flutter pub upgrade --major-versions`. Alternatively,
# dependencies can be manually updated by changing the version numbers below to
# the latest version available on pub.dev. To see which dependencies have newer
# versions available, run `flutter pub outdated`.
dependencies:
  flutter:
    sdk: flutter
  # Supports environment variables
  flutter_dotenv: ^5.2.1
  # The following adds the Cupertino Icons font to your application.
  # Use with the CupertinoIcons class for iOS style icons.
  cupertino_icons: ^1.0.8
  web_socket_channel: ^3.0.1
  record: ^5.1.2
  audio:
    path: ./audio
  http: ^1.2.2

dev_dependencies:
  flutter_test:
    sdk: flutter
  # The "flutter_lints" package below contains a set of recommended lints to
  # encourage good coding practices. The lint set provided by the package is
  # activated in the `analysis_options.yaml` file located at the root of your
  # package. See that file for information about deactivating specific lint
  # rules and activating additional ones.
  flutter_lints: ^5.0.0

# For information on the generic Dart part of this file, see the
# following page: https://dart.dev/tools/pub/pubspec

# The following section is specific to Flutter packages.
flutter:
  assets:
    - .env

  # The following line ensures that the Material Icons font is
  # included with your application, so that you can use the icons in
  # the material Icons class.
  uses-material-design: true

  # To add assets to your application, add an assets section, like this:
  # assets:
  #   - images/a_dot_burr.jpeg
  #   - images/a_dot_ham.jpeg

  # An image asset can refer to one or more resolution-specific "variants", see
  # https://flutter.dev/to/resolution-aware-images

  # For details regarding adding assets from package dependencies, see
  # https://flutter.dev/to/asset-from-package

  # To add custom fonts to your application, add a fonts section here,
  # in this "flutter" section. Each entry in this list should have a
  # "family" key with the font family name, and a "fonts" key with a
  # list giving the asset and other descriptors for the font. For
  # example:
  # fonts:
  #   - family: Schyler
  #     fonts:
  #       - asset: fonts/Schyler-Regular.ttf
  #       - asset: fonts/Schyler-Italic.ttf
  #         style: italic
  #   - family: Trajan Pro
  #     fonts:
  #       - asset: fonts/TrajanPro.ttf
  #       - asset: fonts/TrajanPro_Bold.ttf
  #         weight: 700
  #
  # For details regarding fonts from package dependencies,
  # see https://flutter.dev/to/font-from-package

================================================
FILE: evi/evi-flutter/test/widget_test.dart
================================================
// This is a basic Flutter widget test.
//
// To perform an interaction with a widget in your test, use the WidgetTester
// utility in the flutter_test package. For example, you can send tap and scroll
// gestures. You can also use WidgetTester to find child widgets in the widget
// tree, read text, and verify that the values of widget properties are correct.
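//
// Note: this EVI example needs microphone access and a live websocket
// connection for its full chat flow, so the smoke test below only verifies
// that the app builds and renders; it does not exercise the audio or
// websocket code paths.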

import 'package:flutter/material.dart';
import 'package:flutter_test/flutter_test.dart';

import 'package:evi_example/main.dart';

void main() {
  testWidgets('App renders smoke test', (WidgetTester tester) async {
    // Build our app and trigger a frame. With no credentials loaded,
    // MyApp falls back to rendering an error message instead of the chat UI.
    await tester.pumpWidget(const MyApp());

    expect(find.byType(MaterialApp), findsOneWidget);
  });
}

================================================ FILE: evi/evi-flutter/web/index.html ================================================ evi_example ================================================ FILE: evi/evi-flutter/web/manifest.json ================================================ { "name": "evi_example", "short_name": "evi_example", "start_url": ".", "display": "standalone", "background_color": "#0175C2", "theme_color": "#0175C2", "description": "A new Flutter project.", "orientation": "portrait-primary", "prefer_related_applications": false, "icons": [ { "src": "icons/Icon-192.png", "sizes": "192x192", "type": "image/png" }, { "src": "icons/Icon-512.png", "sizes": "512x512", "type": "image/png" }, { "src": "icons/Icon-maskable-192.png", "sizes": "192x192", "type": "image/png", "purpose": "maskable" }, { "src": "icons/Icon-maskable-512.png", "sizes": "512x512", "type": "image/png", "purpose": "maskable" } ] } ================================================ FILE: evi/evi-next-js-app-router-quickstart/.eslintrc.json ================================================ { "extends": "next/core-web-vitals" } ================================================
FILE: evi/evi-next-js-app-router-quickstart/.gitignore
================================================
# See https://help.github.com/articles/ignoring-files/ for more about ignoring files.

# dependencies
/node_modules
/.pnp
.pnp.js
.yarn/install-state.gz

# testing
/coverage
/test-results/
/playwright-report/

# next.js
/.next/
/out/

# production
/build

# misc
.DS_Store
*.pem

# debug
npm-debug.log*
yarn-debug.log*
yarn-error.log*

# local env files
.env*.local
.env

# vercel
.vercel

# typescript
*.tsbuildinfo
next-env.d.ts

================================================ FILE: evi/evi-next-js-app-router-quickstart/.prettierrc.json ================================================ {} ================================================
FILE: evi/evi-next-js-app-router-quickstart/README.md
================================================

# Empathic Voice Interface | Next.js App Router Quickstart

![preview.png](preview.png)

## Overview

This project features a sample implementation of Hume's [Empathic Voice Interface](https://dev.hume.ai/docs/empathic-voice-interface-evi/overview) using Hume's [React SDK](https://github.com/HumeAI/empathic-voice-api-js/tree/main/packages/react). Here, we have a simple EVI that uses the Next.js App Router.

See the [Quickstart guide](https://dev.hume.ai/docs/empathic-voice-interface-evi/quickstart/nextjs) for a detailed explanation of the code in this project.

## Project deployment

Click the button below to deploy this example project with Vercel:

[![Deploy with Vercel](https://vercel.com/button)](https://vercel.com/new/clone?repository-url=https%3A%2F%2Fgithub.com%2Fhumeai%2Fhume-evi-next-js-starter&env=HUME_API_KEY,HUME_CLIENT_SECRET)

Below are the steps to complete deployment:

1. Create a Git repository for your project.
2. Provide the required environment variables. To get your API key and Secret key, log into the Hume AI Platform and visit the [API keys page](https://app.hume.ai/keys).

## Modify the project

1. Clone this examples repository:

   ```shell
   git clone https://github.com/humeai/hume-api-examples
   cd hume-api-examples/evi/evi-next-js-app-router-quickstart
   ```

2. Install dependencies:

   ```shell
   npm install
   ```

3. Set up your API key and Secret key:

   In order to make an authenticated connection we will first need to generate an access token. Doing so will require your API key and Secret key. These keys can be obtained by logging into the Hume AI Platform and visiting the [API keys page](https://app.hume.ai/keys). For detailed instructions, see our documentation on [getting your API keys](https://dev.hume.ai/docs/introduction/api-key).

   Place your `HUME_API_KEY` and `HUME_SECRET_KEY` in a `.env` file at the root of your project:

   ```shell
   echo "HUME_API_KEY=your_api_key_here" > .env
   echo "HUME_SECRET_KEY=your_secret_key_here" >> .env
   ```

   You can copy the `.env.example` file to use as a template.

4. Specify an EVI configuration (Optional):

   EVI is pre-configured with a set of default values, which are automatically applied if you do not specify a configuration. The default configuration includes a preset voice and language model, but does not include a system prompt or tools. To customize these options, you will need to create and specify your own EVI configuration. To learn more, see our [configuration guide](https://dev.hume.ai/docs/empathic-voice-interface-evi/configuration/build-a-configuration).

   Pass in a configuration ID to the `connect` method inside the [components/StartCall.tsx file](https://github.com/HumeAI/hume-api-examples/blob/main/evi/evi-next-js-app-router-quickstart/components/StartCall.tsx):

   ```tsx
   connect({ auth: { type: "accessToken", value: accessToken }, configId: "<YOUR_CONFIG_ID>" })
   ```
5. Run the project:

   ```shell
   npm run dev
   ```

================================================
FILE: evi/evi-next-js-app-router-quickstart/app/actions/set-llm-key.ts
================================================
"use server";

import { HumeClient } from "hume";

const hume = new HumeClient({
  apiKey: process.env.HUME_API_KEY!,
});

export async function setLlmKeyForChat(chatId: string) {
  const languageModelApiKey = process.env.SUPPLEMENTAL_LLM_API_KEY;
  if (!languageModelApiKey) return;
  await hume.empathicVoice.controlPlane.send(chatId, {
    type: "session_settings",
    languageModelApiKey,
  });
}

================================================
FILE: evi/evi-next-js-app-router-quickstart/app/api-key/page.tsx
================================================
import ChatLoader from "@/components/ChatLoader";

export const dynamic = "force-dynamic";
export const revalidate = 0;

export default async function ApiKeyPage() {
  const apiKey = process.env.HUME_API_KEY;
  if (!apiKey?.trim()) {
    throw new Error("The HUME_API_KEY environment variable is not set.");
  }
  return (
================================================
FILE: evi/evi-next-js-app-router-quickstart/app/api-key/page.tsx
================================================
import ChatLoader from "@/components/ChatLoader";

export const dynamic = "force-dynamic";
export const revalidate = 0;

export default async function ApiKeyPage() {
  const apiKey = process.env.HUME_API_KEY;
  if (!apiKey?.trim()) {
    throw new Error("The HUME_API_KEY environment variable is not set.");
  }
  return (
    // The page's JSX was not preserved in this extraction; it renders the
    // imported ChatLoader component (props assumed).
    <ChatLoader />
  );
}

================================================
FILE: evi/evi-next-js-app-router-quickstart/app/error.tsx
================================================
"use client";

// App Router error boundary: rendered when an error occurs in this segment.
export default function Error() {
  return (
    <div>
      {/* Wrapper markup reconstructed; only the text content survived extraction. */}
      <p>An unexpected error occurred</p>
      <p>Please try again later</p>
    </div>
  );
}
================================================
FILE: evi/evi-next-js-app-router-quickstart/app/globals.css
================================================
@import "tailwindcss";

/* Register theme tokens for Tailwind v4 so utilities like border-border, font-sans work */
@theme {
  --color-border: hsl(var(--border));
  --color-input: hsl(var(--input));
  --color-ring: hsl(var(--ring));
  --color-background: hsl(var(--background));
  --color-foreground: hsl(var(--foreground));
  --color-primary: hsl(var(--primary));
  --color-primary-foreground: hsl(var(--primary-foreground));
  --color-secondary: hsl(var(--secondary));
  --color-secondary-foreground: hsl(var(--secondary-foreground));
  --color-destructive: hsl(var(--destructive));
  --color-destructive-foreground: hsl(var(--destructive-foreground));
  --color-muted: hsl(var(--muted));
  --color-muted-foreground: hsl(var(--muted-foreground));
  --color-accent: hsl(var(--accent));
  --color-accent-foreground: hsl(var(--accent-foreground));
  --color-popover: hsl(var(--popover));
  --color-popover-foreground: hsl(var(--popover-foreground));
  --color-card: hsl(var(--card));
  --color-card-foreground: hsl(var(--card-foreground));

  --font-sans: var(--font-geist-sans), ui-sans-serif, system-ui, sans-serif,
    "Apple Color Emoji", "Segoe UI Emoji", "Segoe UI Symbol", "Noto Color Emoji";
  --font-mono: var(--font-geist-mono), ui-monospace, "SF Mono", Menlo, Consolas,
    monospace;

  --radius-lg: var(--radius);
  --radius-md: calc(var(--radius) - 2px);
  --radius-sm: calc(var(--radius) - 4px);
}

@layer base {
  :root {
    --background: 0 0% 100%;
    --foreground: 240 10% 3.9%;
    --card: 0 0% 100%;
    --card-foreground: 240 10% 3.9%;
    --popover: 0 0% 100%;
    --popover-foreground: 240 10% 3.9%;
    --primary: 240 5.9% 10%;
    --primary-foreground: 0 0% 98%;
    --secondary: 240 4.8% 95.9%;
    --secondary-foreground: 240 5.9% 10%;
    --muted: 240 4.8% 95.9%;
    --muted-foreground: 240 3.8% 46.1%;
    --accent: 240 4.8% 95.9%;
    --accent-foreground: 240 5.9% 10%;
    --destructive: 0 84.2% 60.2%;
    --destructive-foreground: 0 0% 98%;
    --border: 240 5.9% 90%;
    --input: 240 5.9% 90%;
    --ring: 240 5.9% 10%;
    --radius: 0.5rem;
  }

  .dark {
    --background: 240 10% 3.9%;
    --foreground: 0 0% 98%;
    --card: 240 10% 3.9%;
    --card-foreground: 0 0% 98%;
    --popover: 240 10% 3.9%;
    --popover-foreground: 0 0% 98%;
    --primary: 0 0% 98%;
    --primary-foreground: 240 5.9% 10%;
    --secondary: 240 3.7% 15.9%;
    --secondary-foreground: 0 0% 98%;
    --muted: 240 3.7% 15.9%;
    --muted-foreground: 240 5% 64.9%;
    --accent: 240 3.7% 15.9%;
    --accent-foreground: 0 0% 98%;
    --destructive: 0 62.8% 30.6%;
    --destructive-foreground: 0 0% 98%;
    --border: 240 3.7% 15.9%;
    --input: 240 3.7% 15.9%;
    --ring: 240 4.9% 83.9%;
  }
}

@layer base {
  * {
    @apply border-border font-sans;
  }
  body {
    @apply bg-background text-foreground;
  }
}

================================================
FILE: evi/evi-next-js-app-router-quickstart/app/layout.tsx
================================================
import type { Metadata } from "next";
import { GeistSans } from "geist/font/sans";
import { GeistMono } from "geist/font/mono";
import "./globals.css";
import { Nav } from "@/components/Nav";
import { cn } from "@/utils";

export const metadata: Metadata = {
  title: "Hume AI - EVI - Next.js Starter",
  description: "A Next.js starter using Hume AI's Empathic Voice Interface",
};

export default function RootLayout({
  children,
}: Readonly<{
  children: React.ReactNode;
}>) {
  return (