Repository: AudioKit/Waveform
Branch: main
Commit: 357de1945a36
Files: 31
Total size: 55.0 KB
Directory structure:
gitextract_iqm9pz60/
├── .github/
│ └── workflows/
│ └── tests.yml
├── .gitignore
├── .spi.yml
├── .swiftpm/
│ └── xcode/
│ ├── package.xcworkspace/
│ │ └── contents.xcworkspacedata
│ └── xcshareddata/
│ └── xcschemes/
│ └── Waveform.xcscheme
├── Demo/
│ ├── WaveformDemo/
│ │ ├── Assets.xcassets/
│ │ │ ├── AccentColor.colorset/
│ │ │ │ └── Contents.json
│ │ │ ├── AppIcon.appiconset/
│ │ │ │ └── Contents.json
│ │ │ └── Contents.json
│ │ ├── ContentView.swift
│ │ ├── MinimapView.swift
│ │ ├── Preview Content/
│ │ │ └── Preview Assets.xcassets/
│ │ │ └── Contents.json
│ │ ├── WaveformDemo.entitlements
│ │ └── WaveformDemoApp.swift
│ └── WaveformDemo.xcodeproj/
│ ├── project.pbxproj
│ └── project.xcworkspace/
│ ├── contents.xcworkspacedata
│ └── xcshareddata/
│ └── IDEWorkspaceChecks.plist
├── LICENSE
├── Package.swift
├── README.md
├── Sources/
│ └── Waveform/
│ ├── AVAudio+FloatData.swift
│ ├── Helpers.swift
│ ├── Renderer.swift
│ ├── SampleBuffer.swift
│ ├── Waveform.docc/
│ │ └── Waveform.md
│ ├── Waveform.metal
│ └── Waveform.swift
├── Tests/
│ └── WaveformTests/
│ ├── MTLTexture+Image.swift
│ ├── WaveformTests.swift
│ └── beat.aiff
└── Waveform.playground/
├── Contents.swift
└── contents.xcplayground
================================================
FILE CONTENTS
================================================
================================================
FILE: .github/workflows/tests.yml
================================================
name: Tests
on:
workflow_dispatch:
push:
branches: [main]
pull_request:
branches: [main]
jobs:
build:
name: Build ${{ matrix.scheme }} (Xcode ${{ matrix.xcode_version }})
# NOTE: macos-latest is NOT equivalent to macos-12 as of September 2022.
# Source: https://docs.github.com/en/actions/using-github-hosted-runners/about-github-hosted-runners#supported-runners-and-hardware-resources
runs-on: macos-12
strategy:
# 'fail-fast' is set to 'false' so that all matrix jobs run to completion even if one of them fails.
fail-fast: false
matrix:
xcode_version:
- '13.2' # swift 5.5
- '13.4' # swift 5.6
steps:
- uses: actions/checkout@v3
- name: build
run: swift build
# Send notification to Discord on failure.
send_notification:
name: Send Notification
uses: AudioKit/ci/.github/workflows/send_notification.yml@main
needs: [build]
if: ${{ failure() && github.ref == 'refs/heads/main' }}
secrets: inherit
================================================
FILE: .gitignore
================================================
# Xcode
#
# gitignore contributors: remember to update Global/Xcode.gitignore, Objective-C.gitignore & Swift.gitignore
## User settings
xcuserdata/
## compatibility with Xcode 8 and earlier (ignoring not required starting Xcode 9)
*.xcscmblueprint
*.xccheckout
## compatibility with Xcode 3 and earlier (ignoring not required starting Xcode 4)
build/
DerivedData/
*.moved-aside
*.pbxuser
!default.pbxuser
*.mode1v3
!default.mode1v3
*.mode2v3
!default.mode2v3
*.perspectivev3
!default.perspectivev3
## Obj-C/Swift specific
*.hmap
## App packaging
*.ipa
*.dSYM.zip
*.dSYM
## Playgrounds
timeline.xctimeline
playground.xcworkspace
# Swift Package Manager
#
# Add this line if you want to avoid checking in source code from Swift Package Manager dependencies.
# Packages/
# Package.pins
# Package.resolved
# *.xcodeproj
#
# Xcode automatically generates this directory with a .xcworkspacedata file and xcuserdata
# hence it is not needed unless you have added a package configuration file to your project
# .swiftpm
.build/
# CocoaPods
#
# We recommend against adding the Pods directory to your .gitignore. However
# you should judge for yourself, the pros and cons are mentioned at:
# https://guides.cocoapods.org/using/using-cocoapods.html#should-i-check-the-pods-directory-into-source-control
#
# Pods/
#
# Add this line if you want to avoid checking in source code from the Xcode workspace
# *.xcworkspace
# Carthage
#
# Add this line if you want to avoid checking in source code from Carthage dependencies.
# Carthage/Checkouts
Carthage/Build/
# Accio dependency management
Dependencies/
.accio/
# fastlane
#
# It is recommended to not store the screenshots in the git repo.
# Instead, use fastlane to re-generate the screenshots whenever they are needed.
# For more information about the recommended setup visit:
# https://docs.fastlane.tools/best-practices/source-control/#source-control
fastlane/report.xml
fastlane/Preview.html
fastlane/screenshots/**/*.png
fastlane/test_output
# Code Injection
#
# After new code Injection tools there's a generated folder /iOSInjectionProject
# https://github.com/johnno1962/injectionforxcode
iOSInjectionProject/
.DS_Store
================================================
FILE: .spi.yml
================================================
version: 1
builder:
configs:
- documentation_targets: [Waveform]
================================================
FILE: .swiftpm/xcode/package.xcworkspace/contents.xcworkspacedata
================================================
<?xml version="1.0" encoding="UTF-8"?>
<Workspace
version = "1.0">
<FileRef
location = "self:">
</FileRef>
</Workspace>
================================================
FILE: .swiftpm/xcode/xcshareddata/xcschemes/Waveform.xcscheme
================================================
<?xml version="1.0" encoding="UTF-8"?>
<Scheme
LastUpgradeVersion = "1400"
version = "1.3">
<BuildAction
parallelizeBuildables = "YES"
buildImplicitDependencies = "YES">
<BuildActionEntries>
<BuildActionEntry
buildForTesting = "YES"
buildForRunning = "YES"
buildForProfiling = "YES"
buildForArchiving = "YES"
buildForAnalyzing = "YES">
<BuildableReference
BuildableIdentifier = "primary"
BlueprintIdentifier = "Waveform_Waveform"
BuildableName = "Waveform_Waveform"
BlueprintName = "Waveform_Waveform"
ReferencedContainer = "container:">
</BuildableReference>
</BuildActionEntry>
<BuildActionEntry
buildForTesting = "YES"
buildForRunning = "YES"
buildForProfiling = "YES"
buildForArchiving = "YES"
buildForAnalyzing = "YES">
<BuildableReference
BuildableIdentifier = "primary"
BlueprintIdentifier = "Waveform_WaveformTests"
BuildableName = "Waveform_WaveformTests"
BlueprintName = "Waveform_WaveformTests"
ReferencedContainer = "container:">
</BuildableReference>
</BuildActionEntry>
<BuildActionEntry
buildForTesting = "YES"
buildForRunning = "YES"
buildForProfiling = "YES"
buildForArchiving = "YES"
buildForAnalyzing = "YES">
<BuildableReference
BuildableIdentifier = "primary"
BlueprintIdentifier = "Waveform"
BuildableName = "Waveform"
BlueprintName = "Waveform"
ReferencedContainer = "container:">
</BuildableReference>
</BuildActionEntry>
<BuildActionEntry
buildForTesting = "YES"
buildForRunning = "YES"
buildForProfiling = "NO"
buildForArchiving = "NO"
buildForAnalyzing = "YES">
<BuildableReference
BuildableIdentifier = "primary"
BlueprintIdentifier = "WaveformTests"
BuildableName = "WaveformTests"
BlueprintName = "WaveformTests"
ReferencedContainer = "container:">
</BuildableReference>
</BuildActionEntry>
</BuildActionEntries>
</BuildAction>
<TestAction
buildConfiguration = "Debug"
selectedDebuggerIdentifier = "Xcode.DebuggerFoundation.Debugger.LLDB"
selectedLauncherIdentifier = "Xcode.DebuggerFoundation.Launcher.LLDB"
shouldUseLaunchSchemeArgsEnv = "NO">
<EnvironmentVariables>
<EnvironmentVariable
key = "METAL_DEVICE_WRAPPER_TYPE"
value = "1"
isEnabled = "YES">
</EnvironmentVariable>
</EnvironmentVariables>
<Testables>
<TestableReference
skipped = "NO">
<BuildableReference
BuildableIdentifier = "primary"
BlueprintIdentifier = "WaveformTests"
BuildableName = "WaveformTests"
BlueprintName = "WaveformTests"
ReferencedContainer = "container:">
</BuildableReference>
</TestableReference>
</Testables>
</TestAction>
<LaunchAction
buildConfiguration = "Debug"
selectedDebuggerIdentifier = "Xcode.DebuggerFoundation.Debugger.LLDB"
selectedLauncherIdentifier = "Xcode.DebuggerFoundation.Launcher.LLDB"
launchStyle = "0"
useCustomWorkingDirectory = "NO"
ignoresPersistentStateOnLaunch = "NO"
debugDocumentVersioning = "YES"
debugServiceExtension = "internal"
allowLocationSimulation = "YES">
</LaunchAction>
<ProfileAction
buildConfiguration = "Release"
shouldUseLaunchSchemeArgsEnv = "YES"
savedToolIdentifier = ""
useCustomWorkingDirectory = "NO"
debugDocumentVersioning = "YES">
<MacroExpansion>
<BuildableReference
BuildableIdentifier = "primary"
BlueprintIdentifier = "Waveform_Waveform"
BuildableName = "Waveform_Waveform"
BlueprintName = "Waveform_Waveform"
ReferencedContainer = "container:">
</BuildableReference>
</MacroExpansion>
</ProfileAction>
<AnalyzeAction
buildConfiguration = "Debug">
</AnalyzeAction>
<ArchiveAction
buildConfiguration = "Release"
revealArchiveInOrganizer = "YES">
</ArchiveAction>
</Scheme>
================================================
FILE: Demo/WaveformDemo/Assets.xcassets/AccentColor.colorset/Contents.json
================================================
{
"colors" : [
{
"idiom" : "universal"
}
],
"info" : {
"author" : "xcode",
"version" : 1
}
}
================================================
FILE: Demo/WaveformDemo/Assets.xcassets/AppIcon.appiconset/Contents.json
================================================
{
"images" : [
{
"idiom" : "universal",
"platform" : "ios",
"size" : "1024x1024"
},
{
"idiom" : "mac",
"scale" : "1x",
"size" : "16x16"
},
{
"idiom" : "mac",
"scale" : "2x",
"size" : "16x16"
},
{
"idiom" : "mac",
"scale" : "1x",
"size" : "32x32"
},
{
"idiom" : "mac",
"scale" : "2x",
"size" : "32x32"
},
{
"idiom" : "mac",
"scale" : "1x",
"size" : "128x128"
},
{
"idiom" : "mac",
"scale" : "2x",
"size" : "128x128"
},
{
"idiom" : "mac",
"scale" : "1x",
"size" : "256x256"
},
{
"idiom" : "mac",
"scale" : "2x",
"size" : "256x256"
},
{
"idiom" : "mac",
"scale" : "1x",
"size" : "512x512"
},
{
"idiom" : "mac",
"scale" : "2x",
"size" : "512x512"
}
],
"info" : {
"author" : "xcode",
"version" : 1
}
}
================================================
FILE: Demo/WaveformDemo/Assets.xcassets/Contents.json
================================================
{
"info" : {
"author" : "xcode",
"version" : 1
}
}
================================================
FILE: Demo/WaveformDemo/ContentView.swift
================================================
// Copyright AudioKit. All Rights Reserved. Revision History at http://github.com/AudioKit/Waveform/
import AVFoundation
import SwiftUI
import Waveform
/// Observable model for the demo: decodes an audio file into a `SampleBuffer`
/// containing the first (left) channel's samples.
class WaveformDemoModel: ObservableObject {
    var samples: SampleBuffer

    init(file: AVAudioFile) {
        // Force-unwrap matches the demo's assumption that the bundled file decodes.
        let channelData = file.floatChannelData()!
        samples = SampleBuffer(samples: channelData[0])
    }
}
/// Loads the bundled demo audio file ("Piano.mp3") for reading.
/// Force-unwraps/force-tries intentionally: the resource ships with the app.
func getFile() -> AVAudioFile {
    let resourceURL = Bundle.main.url(forResource: "Piano", withExtension: "mp3")!
    let audioFile = try! AVAudioFile(forReading: resourceURL)
    return audioFile
}
/// Restricts `x` to the closed interval [`inf`, `sup`].
/// Applies the upper bound first, then the lower bound, so if the bounds are
/// inverted (`inf > sup`) the result is `inf` — same as the original composition.
func clamp(_ x: Double, _ inf: Double, _ sup: Double) -> Double {
    let cappedAbove = min(x, sup)
    return max(cappedAbove, inf)
}
/// Demo screen: a full-file waveform overview with a draggable minimap
/// selection, above a zoomed waveform showing only the selected range.
struct ContentView: View {
// Decoded sample data for the bundled demo audio file.
@StateObject var model = WaveformDemoModel(file: getFile())
// Normalized [0, 1] selection: `start` is the left edge, `length` the width.
@State var start = 0.0
@State var length = 1.0
// NOTE(review): `formatter` appears unused in this view — confirm before removing.
let formatter = NumberFormatter()
var body: some View {
VStack {
// Overview: full waveform with the minimap selection overlaid.
ZStack(alignment: .leading) {
Waveform(samples: model.samples).foregroundColor(.cyan)
.padding(.vertical, 5)
MinimapView(start: $start, length: $length)
}
.frame(height: 100)
.padding()
// Detail view restricted to the selected sample range.
// NOTE(review): `start` scales by count - 1 while `length` scales by count,
// so start + length can exceed the buffer's sample count — confirm that
// Waveform clamps its range internally.
Waveform(samples: model.samples,
start: Int(start * Double(model.samples.count - 1)),
length: Int(length * Double(model.samples.count)))
.foregroundColor(.blue)
}
.padding()
}
}
================================================
FILE: Demo/WaveformDemo/MinimapView.swift
================================================
// Copyright AudioKit. All Rights Reserved. Revision History at http://github.com/AudioKit/Waveform/
import AVFoundation
import SwiftUI
import Waveform
/// Draggable selection overlay: a translucent box whose position sets `start`
/// and a handle at its right edge that sets `length`. Both values are
/// normalized to [0, 1] of the available width.
struct MinimapView: View {
// Normalized selection shared with the parent view.
@Binding var start: Double
@Binding var length: Double
// Snapshots of `start`/`length` captured when a drag begins; `@GestureState`
// resets them to nil automatically when the gesture ends.
@GestureState var initialStart: Double?
@GestureState var initialLength: Double?
// Corner radius of the selection box and width of the resize handle, in points.
let indicatorSize = 10.0
var body: some View {
GeometryReader { gp in
// Selection box: dragging moves `start`, keeping `length` fixed.
RoundedRectangle(cornerRadius: indicatorSize)
.frame(width: length * gp.size.width)
.offset(x: start * gp.size.width)
.opacity(0.3)
.gesture(DragGesture()
.updating($initialStart) { _, state, _ in
// Capture the pre-drag start position once per gesture.
if state == nil {
state = start
}
}
.onChanged { drag in
if let initialStart = initialStart {
// Clamp so the selection cannot extend past the right edge.
start = clamp(initialStart + drag.translation.width / gp.size.width, 0, 1 - length)
}
}
)
// Resize handle at the selection's right edge: dragging changes `length`.
RoundedRectangle(cornerRadius: indicatorSize)
.foregroundColor(.white)
.frame(width: indicatorSize).opacity(0.3)
.offset(x: (start + length) * gp.size.width)
.padding(indicatorSize)
.gesture(DragGesture()
.updating($initialLength) { _, state, _ in
// Capture the pre-drag length once per gesture.
if state == nil {
state = length
}
}
.onChanged { drag in
if let initialLength = initialLength {
// Clamp so the selection cannot extend past the right edge.
length = clamp(initialLength + drag.translation.width / gp.size.width, 0, 1 - start)
}
}
)
}
}
}
================================================
FILE: Demo/WaveformDemo/Preview Content/Preview Assets.xcassets/Contents.json
================================================
{
"info" : {
"author" : "xcode",
"version" : 1
}
}
================================================
FILE: Demo/WaveformDemo/WaveformDemo.entitlements
================================================
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>com.apple.security.app-sandbox</key>
<true/>
<key>com.apple.security.files.user-selected.read-only</key>
<true/>
</dict>
</plist>
================================================
FILE: Demo/WaveformDemo/WaveformDemoApp.swift
================================================
import SwiftUI
@main
/// App entry point: hosts `ContentView` in a single window group.
struct WaveformDemoApp: App {
var body: some Scene {
WindowGroup {
ContentView()
}
}
}
================================================
FILE: Demo/WaveformDemo.xcodeproj/project.pbxproj
================================================
// !$*UTF8*$!
{
archiveVersion = 1;
classes = {
};
objectVersion = 56;
objects = {
/* Begin PBXBuildFile section */
29D479382A13DBED0033DF90 /* Piano.mp3 in Resources */ = {isa = PBXBuildFile; fileRef = 29D479372A13DBEC0033DF90 /* Piano.mp3 */; };
F130C47828EBF1390017B2AF /* MinimapView.swift in Sources */ = {isa = PBXBuildFile; fileRef = F130C47728EBF1390017B2AF /* MinimapView.swift */; };
F1A202B428DEA41E007CD919 /* WaveformDemoApp.swift in Sources */ = {isa = PBXBuildFile; fileRef = F1A202B328DEA41E007CD919 /* WaveformDemoApp.swift */; };
F1A202B628DEA41E007CD919 /* ContentView.swift in Sources */ = {isa = PBXBuildFile; fileRef = F1A202B528DEA41E007CD919 /* ContentView.swift */; };
F1A202B828DEA420007CD919 /* Assets.xcassets in Resources */ = {isa = PBXBuildFile; fileRef = F1A202B728DEA420007CD919 /* Assets.xcassets */; };
F1A202BC28DEA420007CD919 /* Preview Assets.xcassets in Resources */ = {isa = PBXBuildFile; fileRef = F1A202BB28DEA420007CD919 /* Preview Assets.xcassets */; };
F1A202C628DEA96C007CD919 /* Waveform in Frameworks */ = {isa = PBXBuildFile; productRef = F1A202C528DEA96C007CD919 /* Waveform */; };
F1FB409728E6660E00AFA732 /* beat.aiff in Resources */ = {isa = PBXBuildFile; fileRef = F1FB409628E6660E00AFA732 /* beat.aiff */; };
/* End PBXBuildFile section */
/* Begin PBXFileReference section */
29D479372A13DBEC0033DF90 /* Piano.mp3 */ = {isa = PBXFileReference; lastKnownFileType = audio.mp3; name = Piano.mp3; path = ../../Tests/WaveformTests/Piano.mp3; sourceTree = "<group>"; };
F130C47728EBF1390017B2AF /* MinimapView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = MinimapView.swift; sourceTree = "<group>"; };
F1A202B028DEA41E007CD919 /* WaveformDemo.app */ = {isa = PBXFileReference; explicitFileType = wrapper.application; includeInIndex = 0; path = WaveformDemo.app; sourceTree = BUILT_PRODUCTS_DIR; };
F1A202B328DEA41E007CD919 /* WaveformDemoApp.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = WaveformDemoApp.swift; sourceTree = "<group>"; };
F1A202B528DEA41E007CD919 /* ContentView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ContentView.swift; sourceTree = "<group>"; };
F1A202B728DEA420007CD919 /* Assets.xcassets */ = {isa = PBXFileReference; lastKnownFileType = folder.assetcatalog; path = Assets.xcassets; sourceTree = "<group>"; };
F1A202B928DEA420007CD919 /* WaveformDemo.entitlements */ = {isa = PBXFileReference; lastKnownFileType = text.plist.entitlements; path = WaveformDemo.entitlements; sourceTree = "<group>"; };
F1A202BB28DEA420007CD919 /* Preview Assets.xcassets */ = {isa = PBXFileReference; lastKnownFileType = folder.assetcatalog; path = "Preview Assets.xcassets"; sourceTree = "<group>"; };
F1A202C328DEA7BC007CD919 /* Waveform */ = {isa = PBXFileReference; lastKnownFileType = wrapper; name = Waveform; path = ..; sourceTree = "<group>"; };
F1FB409628E6660E00AFA732 /* beat.aiff */ = {isa = PBXFileReference; lastKnownFileType = audio.aiff; name = beat.aiff; path = ../../Tests/WaveformTests/beat.aiff; sourceTree = "<group>"; };
/* End PBXFileReference section */
/* Begin PBXFrameworksBuildPhase section */
F1A202AD28DEA41E007CD919 /* Frameworks */ = {
isa = PBXFrameworksBuildPhase;
buildActionMask = 2147483647;
files = (
F1A202C628DEA96C007CD919 /* Waveform in Frameworks */,
);
runOnlyForDeploymentPostprocessing = 0;
};
/* End PBXFrameworksBuildPhase section */
/* Begin PBXGroup section */
F1A202A728DEA41E007CD919 = {
isa = PBXGroup;
children = (
F1A202C228DEA7BC007CD919 /* Packages */,
F1A202B228DEA41E007CD919 /* WaveformDemo */,
F1A202B128DEA41E007CD919 /* Products */,
F1A202C428DEA96C007CD919 /* Frameworks */,
);
sourceTree = "<group>";
};
F1A202B128DEA41E007CD919 /* Products */ = {
isa = PBXGroup;
children = (
F1A202B028DEA41E007CD919 /* WaveformDemo.app */,
);
name = Products;
sourceTree = "<group>";
};
F1A202B228DEA41E007CD919 /* WaveformDemo */ = {
isa = PBXGroup;
children = (
29D479372A13DBEC0033DF90 /* Piano.mp3 */,
F1A202B328DEA41E007CD919 /* WaveformDemoApp.swift */,
F1FB409628E6660E00AFA732 /* beat.aiff */,
F1A202B528DEA41E007CD919 /* ContentView.swift */,
F130C47728EBF1390017B2AF /* MinimapView.swift */,
F1A202B728DEA420007CD919 /* Assets.xcassets */,
F1A202B928DEA420007CD919 /* WaveformDemo.entitlements */,
F1A202BA28DEA420007CD919 /* Preview Content */,
);
path = WaveformDemo;
sourceTree = "<group>";
};
F1A202BA28DEA420007CD919 /* Preview Content */ = {
isa = PBXGroup;
children = (
F1A202BB28DEA420007CD919 /* Preview Assets.xcassets */,
);
path = "Preview Content";
sourceTree = "<group>";
};
F1A202C228DEA7BC007CD919 /* Packages */ = {
isa = PBXGroup;
children = (
F1A202C328DEA7BC007CD919 /* Waveform */,
);
name = Packages;
sourceTree = "<group>";
};
F1A202C428DEA96C007CD919 /* Frameworks */ = {
isa = PBXGroup;
children = (
);
name = Frameworks;
sourceTree = "<group>";
};
/* End PBXGroup section */
/* Begin PBXNativeTarget section */
F1A202AF28DEA41E007CD919 /* WaveformDemo */ = {
isa = PBXNativeTarget;
buildConfigurationList = F1A202BF28DEA420007CD919 /* Build configuration list for PBXNativeTarget "WaveformDemo" */;
buildPhases = (
F1A202AC28DEA41E007CD919 /* Sources */,
F1A202AD28DEA41E007CD919 /* Frameworks */,
F1A202AE28DEA41E007CD919 /* Resources */,
);
buildRules = (
);
dependencies = (
);
name = WaveformDemo;
packageProductDependencies = (
F1A202C528DEA96C007CD919 /* Waveform */,
);
productName = WaveformDemo;
productReference = F1A202B028DEA41E007CD919 /* WaveformDemo.app */;
productType = "com.apple.product-type.application";
};
/* End PBXNativeTarget section */
/* Begin PBXProject section */
F1A202A828DEA41E007CD919 /* Project object */ = {
isa = PBXProject;
attributes = {
BuildIndependentTargetsInParallel = 1;
LastSwiftUpdateCheck = 1400;
LastUpgradeCheck = 1400;
TargetAttributes = {
F1A202AF28DEA41E007CD919 = {
CreatedOnToolsVersion = 14.0;
};
};
};
buildConfigurationList = F1A202AB28DEA41E007CD919 /* Build configuration list for PBXProject "WaveformDemo" */;
compatibilityVersion = "Xcode 14.0";
developmentRegion = en;
hasScannedForEncodings = 0;
knownRegions = (
en,
Base,
);
mainGroup = F1A202A728DEA41E007CD919;
productRefGroup = F1A202B128DEA41E007CD919 /* Products */;
projectDirPath = "";
projectRoot = "";
targets = (
F1A202AF28DEA41E007CD919 /* WaveformDemo */,
);
};
/* End PBXProject section */
/* Begin PBXResourcesBuildPhase section */
F1A202AE28DEA41E007CD919 /* Resources */ = {
isa = PBXResourcesBuildPhase;
buildActionMask = 2147483647;
files = (
F1FB409728E6660E00AFA732 /* beat.aiff in Resources */,
F1A202BC28DEA420007CD919 /* Preview Assets.xcassets in Resources */,
29D479382A13DBED0033DF90 /* Piano.mp3 in Resources */,
F1A202B828DEA420007CD919 /* Assets.xcassets in Resources */,
);
runOnlyForDeploymentPostprocessing = 0;
};
/* End PBXResourcesBuildPhase section */
/* Begin PBXSourcesBuildPhase section */
F1A202AC28DEA41E007CD919 /* Sources */ = {
isa = PBXSourcesBuildPhase;
buildActionMask = 2147483647;
files = (
F1A202B628DEA41E007CD919 /* ContentView.swift in Sources */,
F1A202B428DEA41E007CD919 /* WaveformDemoApp.swift in Sources */,
F130C47828EBF1390017B2AF /* MinimapView.swift in Sources */,
);
runOnlyForDeploymentPostprocessing = 0;
};
/* End PBXSourcesBuildPhase section */
/* Begin XCBuildConfiguration section */
F1A202BD28DEA420007CD919 /* Debug */ = {
isa = XCBuildConfiguration;
buildSettings = {
ALWAYS_SEARCH_USER_PATHS = NO;
CLANG_ANALYZER_NONNULL = YES;
CLANG_ANALYZER_NUMBER_OBJECT_CONVERSION = YES_AGGRESSIVE;
CLANG_CXX_LANGUAGE_STANDARD = "gnu++20";
CLANG_ENABLE_MODULES = YES;
CLANG_ENABLE_OBJC_ARC = YES;
CLANG_ENABLE_OBJC_WEAK = YES;
CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES;
CLANG_WARN_BOOL_CONVERSION = YES;
CLANG_WARN_COMMA = YES;
CLANG_WARN_CONSTANT_CONVERSION = YES;
CLANG_WARN_DEPRECATED_OBJC_IMPLEMENTATIONS = YES;
CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR;
CLANG_WARN_DOCUMENTATION_COMMENTS = YES;
CLANG_WARN_EMPTY_BODY = YES;
CLANG_WARN_ENUM_CONVERSION = YES;
CLANG_WARN_INFINITE_RECURSION = YES;
CLANG_WARN_INT_CONVERSION = YES;
CLANG_WARN_NON_LITERAL_NULL_CONVERSION = YES;
CLANG_WARN_OBJC_IMPLICIT_RETAIN_SELF = YES;
CLANG_WARN_OBJC_LITERAL_CONVERSION = YES;
CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR;
CLANG_WARN_QUOTED_INCLUDE_IN_FRAMEWORK_HEADER = YES;
CLANG_WARN_RANGE_LOOP_ANALYSIS = YES;
CLANG_WARN_STRICT_PROTOTYPES = YES;
CLANG_WARN_SUSPICIOUS_MOVE = YES;
CLANG_WARN_UNGUARDED_AVAILABILITY = YES_AGGRESSIVE;
CLANG_WARN_UNREACHABLE_CODE = YES;
CLANG_WARN__DUPLICATE_METHOD_MATCH = YES;
COPY_PHASE_STRIP = NO;
DEBUG_INFORMATION_FORMAT = dwarf;
ENABLE_STRICT_OBJC_MSGSEND = YES;
ENABLE_TESTABILITY = YES;
GCC_C_LANGUAGE_STANDARD = gnu11;
GCC_DYNAMIC_NO_PIC = NO;
GCC_NO_COMMON_BLOCKS = YES;
GCC_OPTIMIZATION_LEVEL = 0;
GCC_PREPROCESSOR_DEFINITIONS = (
"DEBUG=1",
"$(inherited)",
);
GCC_WARN_64_TO_32_BIT_CONVERSION = YES;
GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR;
GCC_WARN_UNDECLARED_SELECTOR = YES;
GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE;
GCC_WARN_UNUSED_FUNCTION = YES;
GCC_WARN_UNUSED_VARIABLE = YES;
MTL_ENABLE_DEBUG_INFO = INCLUDE_SOURCE;
MTL_FAST_MATH = YES;
ONLY_ACTIVE_ARCH = YES;
SWIFT_ACTIVE_COMPILATION_CONDITIONS = DEBUG;
SWIFT_OPTIMIZATION_LEVEL = "-Onone";
};
name = Debug;
};
F1A202BE28DEA420007CD919 /* Release */ = {
isa = XCBuildConfiguration;
buildSettings = {
ALWAYS_SEARCH_USER_PATHS = NO;
CLANG_ANALYZER_NONNULL = YES;
CLANG_ANALYZER_NUMBER_OBJECT_CONVERSION = YES_AGGRESSIVE;
CLANG_CXX_LANGUAGE_STANDARD = "gnu++20";
CLANG_ENABLE_MODULES = YES;
CLANG_ENABLE_OBJC_ARC = YES;
CLANG_ENABLE_OBJC_WEAK = YES;
CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES;
CLANG_WARN_BOOL_CONVERSION = YES;
CLANG_WARN_COMMA = YES;
CLANG_WARN_CONSTANT_CONVERSION = YES;
CLANG_WARN_DEPRECATED_OBJC_IMPLEMENTATIONS = YES;
CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR;
CLANG_WARN_DOCUMENTATION_COMMENTS = YES;
CLANG_WARN_EMPTY_BODY = YES;
CLANG_WARN_ENUM_CONVERSION = YES;
CLANG_WARN_INFINITE_RECURSION = YES;
CLANG_WARN_INT_CONVERSION = YES;
CLANG_WARN_NON_LITERAL_NULL_CONVERSION = YES;
CLANG_WARN_OBJC_IMPLICIT_RETAIN_SELF = YES;
CLANG_WARN_OBJC_LITERAL_CONVERSION = YES;
CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR;
CLANG_WARN_QUOTED_INCLUDE_IN_FRAMEWORK_HEADER = YES;
CLANG_WARN_RANGE_LOOP_ANALYSIS = YES;
CLANG_WARN_STRICT_PROTOTYPES = YES;
CLANG_WARN_SUSPICIOUS_MOVE = YES;
CLANG_WARN_UNGUARDED_AVAILABILITY = YES_AGGRESSIVE;
CLANG_WARN_UNREACHABLE_CODE = YES;
CLANG_WARN__DUPLICATE_METHOD_MATCH = YES;
COPY_PHASE_STRIP = NO;
DEBUG_INFORMATION_FORMAT = "dwarf-with-dsym";
ENABLE_NS_ASSERTIONS = NO;
ENABLE_STRICT_OBJC_MSGSEND = YES;
GCC_C_LANGUAGE_STANDARD = gnu11;
GCC_NO_COMMON_BLOCKS = YES;
GCC_WARN_64_TO_32_BIT_CONVERSION = YES;
GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR;
GCC_WARN_UNDECLARED_SELECTOR = YES;
GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE;
GCC_WARN_UNUSED_FUNCTION = YES;
GCC_WARN_UNUSED_VARIABLE = YES;
MTL_ENABLE_DEBUG_INFO = NO;
MTL_FAST_MATH = YES;
SWIFT_COMPILATION_MODE = wholemodule;
SWIFT_OPTIMIZATION_LEVEL = "-O";
};
name = Release;
};
F1A202C028DEA420007CD919 /* Debug */ = {
isa = XCBuildConfiguration;
buildSettings = {
ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon;
ASSETCATALOG_COMPILER_GLOBAL_ACCENT_COLOR_NAME = AccentColor;
CODE_SIGN_ENTITLEMENTS = WaveformDemo/WaveformDemo.entitlements;
CODE_SIGN_STYLE = Automatic;
CURRENT_PROJECT_VERSION = 1;
DEVELOPMENT_ASSET_PATHS = "\"WaveformDemo/Preview Content\"";
ENABLE_PREVIEWS = YES;
GENERATE_INFOPLIST_FILE = YES;
"INFOPLIST_KEY_UIApplicationSceneManifest_Generation[sdk=iphoneos*]" = YES;
"INFOPLIST_KEY_UIApplicationSceneManifest_Generation[sdk=iphonesimulator*]" = YES;
"INFOPLIST_KEY_UIApplicationSupportsIndirectInputEvents[sdk=iphoneos*]" = YES;
"INFOPLIST_KEY_UIApplicationSupportsIndirectInputEvents[sdk=iphonesimulator*]" = YES;
"INFOPLIST_KEY_UILaunchScreen_Generation[sdk=iphoneos*]" = YES;
"INFOPLIST_KEY_UILaunchScreen_Generation[sdk=iphonesimulator*]" = YES;
"INFOPLIST_KEY_UIStatusBarStyle[sdk=iphoneos*]" = UIStatusBarStyleDefault;
"INFOPLIST_KEY_UIStatusBarStyle[sdk=iphonesimulator*]" = UIStatusBarStyleDefault;
INFOPLIST_KEY_UISupportedInterfaceOrientations_iPad = "UIInterfaceOrientationPortrait UIInterfaceOrientationPortraitUpsideDown UIInterfaceOrientationLandscapeLeft UIInterfaceOrientationLandscapeRight";
INFOPLIST_KEY_UISupportedInterfaceOrientations_iPhone = "UIInterfaceOrientationPortrait UIInterfaceOrientationLandscapeLeft UIInterfaceOrientationLandscapeRight";
IPHONEOS_DEPLOYMENT_TARGET = 15.0;
LD_RUNPATH_SEARCH_PATHS = "@executable_path/Frameworks";
"LD_RUNPATH_SEARCH_PATHS[sdk=macosx*]" = "@executable_path/../Frameworks";
MACOSX_DEPLOYMENT_TARGET = 12.0;
MARKETING_VERSION = 1.0;
PRODUCT_BUNDLE_IDENTIFIER = io.audiokit.WaveformDemo;
PRODUCT_NAME = "$(TARGET_NAME)";
SDKROOT = auto;
SUPPORTED_PLATFORMS = "iphoneos iphonesimulator macosx";
SWIFT_EMIT_LOC_STRINGS = YES;
SWIFT_VERSION = 5.0;
TARGETED_DEVICE_FAMILY = "1,2";
};
name = Debug;
};
F1A202C128DEA420007CD919 /* Release */ = {
isa = XCBuildConfiguration;
buildSettings = {
ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon;
ASSETCATALOG_COMPILER_GLOBAL_ACCENT_COLOR_NAME = AccentColor;
CODE_SIGN_ENTITLEMENTS = WaveformDemo/WaveformDemo.entitlements;
CODE_SIGN_STYLE = Automatic;
CURRENT_PROJECT_VERSION = 1;
DEVELOPMENT_ASSET_PATHS = "\"WaveformDemo/Preview Content\"";
ENABLE_PREVIEWS = YES;
GENERATE_INFOPLIST_FILE = YES;
"INFOPLIST_KEY_UIApplicationSceneManifest_Generation[sdk=iphoneos*]" = YES;
"INFOPLIST_KEY_UIApplicationSceneManifest_Generation[sdk=iphonesimulator*]" = YES;
"INFOPLIST_KEY_UIApplicationSupportsIndirectInputEvents[sdk=iphoneos*]" = YES;
"INFOPLIST_KEY_UIApplicationSupportsIndirectInputEvents[sdk=iphonesimulator*]" = YES;
"INFOPLIST_KEY_UILaunchScreen_Generation[sdk=iphoneos*]" = YES;
"INFOPLIST_KEY_UILaunchScreen_Generation[sdk=iphonesimulator*]" = YES;
"INFOPLIST_KEY_UIStatusBarStyle[sdk=iphoneos*]" = UIStatusBarStyleDefault;
"INFOPLIST_KEY_UIStatusBarStyle[sdk=iphonesimulator*]" = UIStatusBarStyleDefault;
INFOPLIST_KEY_UISupportedInterfaceOrientations_iPad = "UIInterfaceOrientationPortrait UIInterfaceOrientationPortraitUpsideDown UIInterfaceOrientationLandscapeLeft UIInterfaceOrientationLandscapeRight";
INFOPLIST_KEY_UISupportedInterfaceOrientations_iPhone = "UIInterfaceOrientationPortrait UIInterfaceOrientationLandscapeLeft UIInterfaceOrientationLandscapeRight";
IPHONEOS_DEPLOYMENT_TARGET = 15.0;
LD_RUNPATH_SEARCH_PATHS = "@executable_path/Frameworks";
"LD_RUNPATH_SEARCH_PATHS[sdk=macosx*]" = "@executable_path/../Frameworks";
MACOSX_DEPLOYMENT_TARGET = 12.0;
MARKETING_VERSION = 1.0;
PRODUCT_BUNDLE_IDENTIFIER = io.audiokit.WaveformDemo;
PRODUCT_NAME = "$(TARGET_NAME)";
SDKROOT = auto;
SUPPORTED_PLATFORMS = "iphoneos iphonesimulator macosx";
SWIFT_EMIT_LOC_STRINGS = YES;
SWIFT_VERSION = 5.0;
TARGETED_DEVICE_FAMILY = "1,2";
};
name = Release;
};
/* End XCBuildConfiguration section */
/* Begin XCConfigurationList section */
F1A202AB28DEA41E007CD919 /* Build configuration list for PBXProject "WaveformDemo" */ = {
isa = XCConfigurationList;
buildConfigurations = (
F1A202BD28DEA420007CD919 /* Debug */,
F1A202BE28DEA420007CD919 /* Release */,
);
defaultConfigurationIsVisible = 0;
defaultConfigurationName = Release;
};
F1A202BF28DEA420007CD919 /* Build configuration list for PBXNativeTarget "WaveformDemo" */ = {
isa = XCConfigurationList;
buildConfigurations = (
F1A202C028DEA420007CD919 /* Debug */,
F1A202C128DEA420007CD919 /* Release */,
);
defaultConfigurationIsVisible = 0;
defaultConfigurationName = Release;
};
/* End XCConfigurationList section */
/* Begin XCSwiftPackageProductDependency section */
F1A202C528DEA96C007CD919 /* Waveform */ = {
isa = XCSwiftPackageProductDependency;
productName = Waveform;
};
/* End XCSwiftPackageProductDependency section */
};
rootObject = F1A202A828DEA41E007CD919 /* Project object */;
}
================================================
FILE: Demo/WaveformDemo.xcodeproj/project.xcworkspace/contents.xcworkspacedata
================================================
<?xml version="1.0" encoding="UTF-8"?>
<Workspace
version = "1.0">
<FileRef
location = "self:">
</FileRef>
</Workspace>
================================================
FILE: Demo/WaveformDemo.xcodeproj/project.xcworkspace/xcshareddata/IDEWorkspaceChecks.plist
================================================
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>IDEDidComputeMac32BitWarning</key>
<true/>
</dict>
</plist>
================================================
FILE: LICENSE
================================================
MIT License
Copyright (c) 2022 AudioKit
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
================================================
FILE: Package.swift
================================================
// swift-tools-version:5.3
import PackageDescription

// Swift Package manifest for the Waveform library.
// The library target bundles its DocC catalog; the test target bundles a
// short audio file used by the render tests.
let package = Package(
    name: "Waveform",
    platforms: [.macOS(.v11), .iOS(.v14)],
    products: [.library(name: "Waveform", targets: ["Waveform"])],
    targets: [
        .target(name: "Waveform", resources: [.process("Waveform.docc")]),
        .testTarget(name: "WaveformTests", dependencies: ["Waveform"], resources: [.copy("beat.aiff")]),
    ]
)
================================================
FILE: README.md
================================================
# Waveform
GPU accelerated SwiftUI waveform view

## Documentation
The API Reference can be found on [the AudioKit Website](https://www.audiokit.io/Waveform).
Package contains a demo project and a playground to help you get started quickly.
================================================
FILE: Sources/Waveform/AVAudio+FloatData.swift
================================================
// Copyright AudioKit. All Rights Reserved. Revision History at http://github.com/AudioKit/Waveform/
import Accelerate
import AVFoundation
extension AVAudioPCMBuffer {
    /// Returns audio data as an `Array` of `Float` Arrays.
    ///
    /// If stereo:
    /// - `floatChannelData?[0]` will contain an Array of left channel samples as `Float`
    /// - `floatChannelData?[1]` will contain an Array of right channel samples as `Float`
    ///
    /// - Returns: one `[Float]` per channel, each `frameLength` samples long,
    ///            or nil when the buffer carries no float channel data.
    func toFloatChannelData() -> [[Float]]? {
        // Do we have PCM channel data?
        guard let pcmFloatChannelData = floatChannelData else {
            return nil
        }

        let channelCount = Int(format.channelCount)
        let frameLength = Int(self.frameLength)
        let stride = self.stride

        var result = [[Float]]()
        result.reserveCapacity(channelCount)

        for channel in 0 ..< channelCount {
            let channelPointer = pcmFloatChannelData[channel]
            if stride == 1 {
                // Contiguous (deinterleaved) samples: bulk-copy the channel
                // in one shot instead of looping per sample.
                result.append(Array(UnsafeBufferPointer(start: channelPointer, count: frameLength)))
            } else {
                // Interleaved data: gather every `stride`-th sample.
                var channelSamples = [Float](repeating: 0, count: frameLength)
                for sampleIndex in 0 ..< frameLength {
                    channelSamples[sampleIndex] = channelPointer[sampleIndex * stride]
                }
                result.append(channelSamples)
            }
        }
        return result
    }
}
extension AVAudioFile {
    /// Converts the file's entire contents to a 32-bit float PCM buffer.
    ///
    /// - Returns: the filled buffer, or nil if allocation or reading fails.
    func toAVAudioPCMBuffer() -> AVAudioPCMBuffer? {
        guard let buffer = AVAudioPCMBuffer(pcmFormat: processingFormat,
                                            frameCapacity: AVAudioFrameCount(length)) else { return nil }
        do {
            framePosition = 0
            try read(into: buffer)
        } catch let error as NSError {
            // Bug fix: previously the (empty or partially filled) buffer was
            // still returned after a failed read; fail explicitly instead.
            print("Cannot read into buffer " + error.localizedDescription)
            return nil
        }
        return buffer
    }

    /// Converts the file's contents to a Swift-friendly array of per-channel
    /// `Float` sample arrays, or nil when the file cannot be read.
    public func floatChannelData() -> [[Float]]? {
        guard let pcmBuffer = toAVAudioPCMBuffer(),
              let data = pcmBuffer.toFloatChannelData() else { return nil }
        return data
    }
}
================================================
FILE: Sources/Waveform/Helpers.swift
================================================
// Copyright AudioKit. All Rights Reserved. Revision History at http://github.com/AudioKit/Waveform/
import Foundation
import Metal
import SwiftUI
#if canImport(UIKit)
import UIKit
#elseif canImport(AppKit)
import AppKit
#endif
/// Reduces `samples` to one minimum value per consecutive `binSize` chunk.
/// Any trailing samples that do not fill a whole bin are ignored.
func binMin(samples: [Float], binSize: Int) -> [Float] {
    let binCount = samples.count / binSize
    var result = [Float](repeating: 0.0, count: binCount)
    // Plain while loops are deliberate: Swift Range iteration (and
    // `slice.min()`) is far too slow in unoptimized debug builds.
    var binIndex = 0
    while binIndex < binCount {
        let lower = binIndex * binSize
        let upper = lower + binSize
        var smallest = Float.greatestFiniteMagnitude
        var sampleIndex = lower
        while sampleIndex < upper {
            if samples[sampleIndex] < smallest {
                smallest = samples[sampleIndex]
            }
            sampleIndex += 1
        }
        result[binIndex] = smallest
        binIndex += 1
    }
    return result
}
/// Reduces `samples` to one maximum value per consecutive `binSize` chunk.
/// Any trailing samples that do not fill a whole bin are ignored.
func binMax(samples: [Float], binSize: Int) -> [Float] {
    let binCount = samples.count / binSize
    var result = [Float](repeating: 0.0, count: binCount)
    // Plain while loops are deliberate: Swift Range iteration (and
    // `slice.max()`) is far too slow in unoptimized debug builds.
    var binIndex = 0
    while binIndex < binCount {
        let lower = binIndex * binSize
        let upper = lower + binSize
        var largest = -Float.greatestFiniteMagnitude
        var sampleIndex = lower
        while sampleIndex < upper {
            if samples[sampleIndex] > largest {
                largest = samples[sampleIndex]
            }
            sampleIndex += 1
        }
        result[binIndex] = largest
        binIndex += 1
    }
    return result
}
extension MTLDevice {
    /// Creates a Metal buffer containing the given float values.
    ///
    /// - Returns: nil for an empty array (Metal cannot create a zero-length
    ///   buffer) or when allocation fails.
    func makeBuffer(_ values: [Float]) -> MTLBuffer? {
        guard !values.isEmpty else { return nil }
        return makeBuffer(bytes: values, length: MemoryLayout<Float>.size * values.count)
    }
}
public extension MTLRenderCommandEncoder {
    /// Convenience for passing a single value as fragment bytes.
    /// (Fix: restores the `&copy` argument that was garbled to `©` in the
    /// extracted source — the value must be passed by address.)
    func setFragmentBytes<T>(_ value: T, index: Int) {
        var copy = value
        setFragmentBytes(&copy, length: MemoryLayout<T>.size, index: index)
    }

    /// Convenience overload accepting an `Int32` index.
    func setFragmentBytes<T>(_ value: T, index: Int32) {
        var copy = value
        setFragmentBytes(&copy, length: MemoryLayout<T>.size, index: Int(index))
    }
}
extension Color {
    /// The color's RGBA components as 0...1 floats.
    var components: SIMD4<Float> {
        var r: CGFloat = 0
        var g: CGFloat = 0
        var b: CGFloat = 0
        var a: CGFloat = 0

        #if canImport(UIKit)
        UIColor(self).getRed(&r, green: &g, blue: &b, alpha: &a)
        #elseif canImport(AppKit)
        // usingColorSpace(_:) can return nil (e.g. for catalog or pattern
        // colors); previously this force-unwrapped and could crash. Fall
        // back to transparent black instead.
        if let rgb = NSColor(self).usingColorSpace(.deviceRGB) {
            rgb.getRed(&r, green: &g, blue: &b, alpha: &a)
        }
        #endif
        return .init(Float(r), Float(g), Float(b), Float(a))
    }
}
================================================
FILE: Sources/Waveform/Renderer.swift
================================================
// Copyright AudioKit. All Rights Reserved. Revision History at http://github.com/AudioKit/Waveform/
import Foundation
import Metal
import MetalKit
import SwiftUI
let MaxBuffers = 3
/// Look-and-feel parameters for the waveform, laid out for upload to the
/// fragment shader (must match `Constants` in Waveform.metal).
struct Constants {
    /// Foreground color as normalized RGBA components.
    var color: SIMD4<Float>

    /// Creates the constants from a SwiftUI color.
    /// - Parameter color: Foreground color (defaults to white).
    init(color: Color = .white) {
        self.color = color.components
    }
}
/// Draws a SampleBuffer slice into an MTKView using min/max mip buffers.
class Renderer: NSObject, MTKViewDelegate {
    var device: MTLDevice!
    var queue: MTLCommandQueue!
    var pipeline: MTLRenderPipelineState!
    var source = ""
    public var constants = Constants()

    // Limits the CPU to encoding MaxBuffers frames ahead of the GPU.
    private let inflightSemaphore = DispatchSemaphore(value: MaxBuffers)

    // Downsampled min/max sample buffers, ordered from full resolution to
    // coarser levels (built by makeBuffers).
    var minBuffers: [MTLBuffer] = []
    var maxBuffers: [MTLBuffer] = []

    var samples = SampleBuffer(samples: [0])
    var start = 0
    var length = 0

    init(device: MTLDevice) {
        self.device = device
        queue = device.makeCommandQueue()

        let library = try! device.makeDefaultLibrary(bundle: Bundle.module)

        let rpd = MTLRenderPipelineDescriptor()
        rpd.vertexFunction = library.makeFunction(name: "waveform_vert")
        rpd.fragmentFunction = library.makeFunction(name: "waveform_frag")

        // Premultiplied-style alpha blending over a transparent clear color.
        let colorAttachment = rpd.colorAttachments[0]!
        colorAttachment.pixelFormat = .bgra8Unorm
        colorAttachment.isBlendingEnabled = true
        colorAttachment.sourceRGBBlendFactor = .sourceAlpha
        colorAttachment.sourceAlphaBlendFactor = .sourceAlpha
        colorAttachment.destinationRGBBlendFactor = .oneMinusSourceAlpha
        colorAttachment.destinationAlphaBlendFactor = .oneMinusSourceAlpha

        pipeline = try! device.makeRenderPipelineState(descriptor: rpd)

        // Placeholder buffers so a draw before set(samples:) doesn't crash.
        minBuffers = [device.makeBuffer([0])!]
        maxBuffers = [device.makeBuffer([0])!]

        super.init()
    }

    func mtkView(_: MTKView, drawableSizeWillChange _: CGSize) {}

    /// Returns the first (finest) min/max buffer pair whose sample count is
    /// below the target pixel width, or the coarsest pair as a fallback.
    /// Returns (nil, nil) when no buffers exist.
    func selectBuffers(width: CGFloat) -> (MTLBuffer?, MTLBuffer?) {
        // Fix: removed the dead `level` counter the original incremented
        // but never read.
        for (minBuffer, maxBuffer) in zip(minBuffers, maxBuffers) {
            if CGFloat(minBuffer.length / MemoryLayout<Float>.size) < width {
                return (minBuffer, maxBuffer)
            }
        }
        // `last` is nil for empty arrays, preserving the graceful-fail path.
        return (minBuffers.last, maxBuffers.last)
    }

    /// Encodes a draw of the current sample slice into the render pass.
    func encode(to commandBuffer: MTLCommandBuffer,
                pass: MTLRenderPassDescriptor,
                width: CGFloat)
    {
        pass.colorAttachments[0].clearColor = MTLClearColorMake(0, 0, 0, 0)

        let highestResolutionCount = Float(samples.samples.count)
        let startFactor = Float(start) / highestResolutionCount
        let lengthFactor = Float(length) / highestResolutionCount

        let (minBufferOpt, maxBufferOpt) = selectBuffers(width: width / CGFloat(lengthFactor))
        guard let minBuffer = minBufferOpt, let maxBuffer = maxBufferOpt else {
            // Early return to gracefully fail.
            return
        }

        let enc = commandBuffer.makeRenderCommandEncoder(descriptor: pass)!
        enc.setRenderPipelineState(pipeline)

        let bufferLength = Float(minBuffer.length / MemoryLayout<Float>.size)
        let bufferStart = Int(bufferLength * startFactor)
        // Fix: declare as Int32 to match the MemoryLayout<Int32>.size length
        // passed below and the shader's `constant uint& count` parameter
        // (previously an 8-byte Int was passed with a 4-byte length).
        var bufferCount = Int32(bufferLength * lengthFactor)
        enc.setFragmentBuffer(minBuffer, offset: bufferStart * MemoryLayout<Float>.size, index: 0)
        enc.setFragmentBuffer(maxBuffer, offset: bufferStart * MemoryLayout<Float>.size, index: 1)
        assert(minBuffer.length == maxBuffer.length)
        enc.setFragmentBytes(&bufferCount, length: MemoryLayout<Int32>.size, index: 2)
        let c = [constants]
        enc.setFragmentBytes(c, length: MemoryLayout<Constants>.size, index: 3)
        enc.drawPrimitives(type: .triangleStrip, vertexStart: 0, vertexCount: 4)
        enc.endEncoding()
    }

    func draw(in view: MTKView) {
        let size = view.frame.size
        let w = Float(size.width)
        let h = Float(size.height)

        // Nothing to draw into a degenerate view.
        if w == 0 || h == 0 {
            return
        }

        // Use the semaphore to encode at most MaxBuffers frames ahead.
        _ = inflightSemaphore.wait(timeout: DispatchTime.distantFuture)

        let commandBuffer = queue.makeCommandBuffer()!

        let semaphore = inflightSemaphore
        commandBuffer.addCompletedHandler { _ in
            semaphore.signal()
        }

        if let renderPassDescriptor = view.currentRenderPassDescriptor, let currentDrawable = view.currentDrawable {
            encode(to: commandBuffer, pass: renderPassDescriptor, width: size.width)
            commandBuffer.present(currentDrawable)
        }
        commandBuffer.commit()
    }

    /// Updates the displayed slice; rebuilds the GPU buffers only when the
    /// SampleBuffer instance actually changes (identity comparison).
    func set(samples: SampleBuffer, start: Int, length: Int) {
        self.start = start
        self.length = length

        if samples === self.samples {
            return
        }

        self.samples = samples
        let buffers = makeBuffers(device: device, samples: samples)
        self.minBuffers = buffers.0
        self.maxBuffers = buffers.1
    }
}
/// Builds a chain of progressively downsampled min/max sample buffers
/// (binSize 2 per level), from full resolution down to a few samples.
/// Returns empty arrays when the samples cannot back a Metal buffer
/// (e.g. an empty sample array); callers handle that gracefully.
func makeBuffers(device: MTLDevice, samples: SampleBuffer) -> ([MTLBuffer], [MTLBuffer]) {
    var minSamples = samples.samples
    var maxSamples = samples.samples
    var s = samples.samples.count

    var minBuffers: [MTLBuffer] = []
    var maxBuffers: [MTLBuffer] = []

    while s > 2 {
        // Fix: previously these were force-unwrapped and crashed when
        // buffer creation failed (e.g. for empty input).
        guard let minBuffer = device.makeBuffer(minSamples),
              let maxBuffer = device.makeBuffer(maxSamples) else { break }
        minBuffers.append(minBuffer)
        maxBuffers.append(maxBuffer)
        minSamples = binMin(samples: minSamples, binSize: 2)
        maxSamples = binMax(samples: maxSamples, binSize: 2)
        s /= 2
    }
    return (minBuffers, maxBuffers)
}
================================================
FILE: Sources/Waveform/SampleBuffer.swift
================================================
// Copyright AudioKit. All Rights Reserved. Revision History at http://github.com/AudioKit/Waveform/
import Foundation
/// Immutable sample storage. Because instances never change, a cheap
/// identity comparison tells callers whether a recompute is needed.
public final class SampleBuffer: Sendable {
    let samples: [Float]

    /// Creates a buffer wrapping the given samples.
    public init(samples: [Float]) {
        self.samples = samples
    }

    /// Total number of samples held by the buffer.
    public var count: Int {
        return samples.count
    }
}
================================================
FILE: Sources/Waveform/Waveform.docc/Waveform.md
================================================
# ``Waveform``
GPU accelerated SwiftUI waveform view
## Overview
Code is hosted on GitHub: [AudioKit/Waveform](https://github.com/AudioKit/Waveform/)

## Topics
### <!--@START_MENU_TOKEN@-->Group<!--@END_MENU_TOKEN@-->
- <!--@START_MENU_TOKEN@-->``Symbol``<!--@END_MENU_TOKEN@-->
================================================
FILE: Sources/Waveform/Waveform.metal
================================================
// Copyright AudioKit. All Rights Reserved. Revision History at http://github.com/AudioKit/Waveform/
#include <metal_stdlib>
using namespace metal;
// Reduces adjacent sample pairs to per-bin minima and maxima.
// One thread handles one output bin; extra threads bail out early.
kernel void waveform_bin(device const float* in,
                         device float* out_min,
                         device float* out_max,
                         constant uint& count,
                         uint tid [[ thread_position_in_grid]]) {
    if (tid >= count) {
        return;
    }
    auto first = in[tid * 2];
    auto second = in[tid * 2 + 1];
    out_min[tid] = min(first, second);
    out_max[tid] = max(first, second);
}
// Vertex-to-fragment interpolants.
struct FragIn {
    float4 position [[ position ]];
    float2 uv; // (0, 1) x (-1, 1)
};

// Clip-space corners of a full-screen triangle strip.
constant float2 pos[4] = { {-1,-1}, {1,-1}, {-1,1}, {1,1 } };
// Matching uv coordinates; y spans -1...1 so 0 is the waveform's center line.
constant float2 uv[4] = { {0, -1}, {1, -1}, {0,1}, {1,1 } };
// Emits one corner of the full-screen strip per vertex id.
vertex FragIn waveform_vert(uint id [[ vertex_id ]]) {
    FragIn result;
    result.position = float4(pos[id], 0, 1);
    result.uv = uv[id];
    return result;
}
// Per-draw parameters uploaded from the CPU; layout must match the Swift
// `Constants` struct in Renderer.swift.
struct Constants {
    float4 color; // foreground color, normalized RGBA
};
// Returns the waveform coverage (0...1) at uv, feathering the top and
// bottom edges with a screen-space falloff.
float sample_waveform(device const float* min_waveform,
                      device const float* max_waveform,
                      uint count,
                      float2 uv) {
    if (count == 0) {
        return 0;
    }
    // Fix: clamp to the last valid index (count - 1). The previous upper
    // bound of `count` read one element past the end when uv.x reached 1.0.
    int x = clamp(int(count * uv.x), 0, int(count) - 1);
    auto min_value = min_waveform[x];
    auto max_value = max_waveform[x];
    auto falloff = 4 * length(fwidth(uv));
    // Feather the top and bottom.
    auto s0 = smoothstep(min_value - falloff, min_value, uv.y);
    auto s1 = 1.0 - smoothstep(max_value, max_value + falloff, uv.y);
    return s0 * s1;
}
// Tints the waveform coverage with the configured foreground color,
// scaling alpha so feathered edges blend smoothly.
fragment half4 waveform_frag(FragIn in [[ stage_in ]],
                             device const float* min_waveform,
                             device const float* max_waveform,
                             constant uint& count,
                             constant Constants& constants) {
    half coverage = sample_waveform(min_waveform, max_waveform, count, in.uv);
    half4 tinted = half4(constants.color);
    tinted.a *= coverage;
    return tinted;
}
================================================
FILE: Sources/Waveform/Waveform.swift
================================================
// Copyright AudioKit. All Rights Reserved. Revision History at http://github.com/AudioKit/Waveform/
import AVFoundation
import MetalKit
import SwiftUI
#if os(macOS)
/// Waveform SwiftUI View (macOS).
public struct Waveform: NSViewRepresentable {
    var samples: SampleBuffer
    var start: Int
    var length: Int
    var constants: Constants = Constants()

    /// Initialize the waveform
    /// - Parameters:
    ///   - samples: All samples able to be displayed
    ///   - start: Which sample on which to start displaying samples
    ///   - length: The width of the entire waveform in samples; clamped to
    ///     the available range, and 0 means "through the last sample"
    public init(samples: SampleBuffer, start: Int = 0, length: Int = 0) {
        self.samples = samples
        self.start = start
        if length > 0 {
            self.length = min(length, samples.samples.count - start)
        } else {
            self.length = samples.samples.count - start
        }
    }

    /// Class required by NSViewRepresentable; owns the Metal renderer.
    public class Coordinator {
        var renderer: Renderer

        init(constants: Constants) {
            renderer = Renderer(device: MTLCreateSystemDefaultDevice()!)
            renderer.constants = constants
        }
    }

    /// Required by NSViewRepresentable
    public func makeCoordinator() -> Coordinator {
        return Coordinator(constants: constants)
    }

    /// Required by NSViewRepresentable
    public func makeNSView(context: Context) -> some NSView {
        let metalView = MTKView(frame: CGRect(x: 0, y: 0, width: 1024, height: 768),
                                device: MTLCreateSystemDefaultDevice()!)
        // Draw only when explicitly invalidated (see updateNSView), not on
        // a display-link timer.
        metalView.enableSetNeedsDisplay = true
        metalView.isPaused = true
        metalView.delegate = context.coordinator.renderer
        // Transparent background so the view composites over SwiftUI content.
        metalView.layer?.isOpaque = false
        return metalView
    }

    /// Required by NSViewRepresentable
    public func updateNSView(_ nsView: NSViewType, context: Context) {
        let renderer = context.coordinator.renderer
        renderer.constants = constants
        renderer.set(samples: samples, start: start, length: length)
        nsView.setNeedsDisplay(nsView.bounds)
    }
}
#else
/// Waveform SwiftUI View (iOS).
public struct Waveform: UIViewRepresentable {
    var samples: SampleBuffer
    var start: Int
    var length: Int
    var constants: Constants = Constants()

    /// Initialize the waveform
    /// - Parameters:
    ///   - samples: All samples able to be displayed
    ///   - start: Which sample on which to start displaying samples
    ///   - length: The width of the entire waveform in samples; clamped to
    ///     the available range, and 0 means "through the last sample"
    public init(samples: SampleBuffer, start: Int = 0, length: Int = 0) {
        self.samples = samples
        self.start = start
        // Fix: clamp to the available sample range, consistent with the
        // macOS implementation (previously start was ignored here).
        if length > 0 {
            self.length = min(length, samples.samples.count - start)
        } else {
            self.length = samples.samples.count - start
        }
    }

    /// Required by UIViewRepresentable; owns the Metal renderer.
    public class Coordinator {
        var renderer: Renderer

        init(constants: Constants) {
            renderer = Renderer(device: MTLCreateSystemDefaultDevice()!)
            // Fix: the passed-in constants were previously discarded.
            renderer.constants = constants
        }
    }

    /// Required by UIViewRepresentable
    public func makeCoordinator() -> Coordinator {
        return Coordinator(constants: constants)
    }

    /// Required by UIViewRepresentable
    public func makeUIView(context: Context) -> some UIView {
        let metalView = MTKView(frame: CGRect(x: 0, y: 0, width: 1024, height: 768),
                                device: MTLCreateSystemDefaultDevice()!)
        // Draw only when explicitly invalidated (see updateUIView), not on
        // a display-link timer.
        metalView.enableSetNeedsDisplay = true
        metalView.isPaused = true
        metalView.delegate = context.coordinator.renderer
        // Transparent background so the view composites over SwiftUI content.
        metalView.layer.isOpaque = false
        return metalView
    }

    /// Required by UIViewRepresentable
    public func updateUIView(_ uiView: UIViewType, context: Context) {
        let renderer = context.coordinator.renderer
        renderer.constants = constants
        renderer.set(samples: samples, start: start, length: length)
        uiView.setNeedsDisplay()
    }
}
#endif
extension Waveform {
    /// Modifier to change the foreground color of the waveform.
    /// - Parameter foregroundColor: foreground color
    /// - Returns: a copy of the view with updated shader constants
    public func foregroundColor(_ foregroundColor: Color) -> Waveform {
        var copy = self
        copy.constants = Constants(color: foregroundColor)
        return copy
    }
}
================================================
FILE: Tests/WaveformTests/MTLTexture+Image.swift
================================================
// Copyright AudioKit. All Rights Reserved. Revision History at http://github.com/AudioKit/Waveform/
import CoreGraphics
import Foundation
import Metal
/// Wraps a raw RGBA8 pixel buffer in a CGImage.
///
/// Ownership of `data` transfers to the image: the buffer is deallocated
/// when the backing data provider is released.
/// - Parameters:
///   - data: pointer to 4*w*h bytes of RGBA pixels
///   - w: image width in pixels
///   - h: image height in pixels
func createImage(data: UnsafeMutablePointer<UInt8>, w: Int, h: Int) -> CGImage {
    let dataSize = 4 * w * h
    // Fix: the release callback was a no-op, leaking the pixel buffer.
    // Deallocate it when CoreGraphics releases the provider.
    let provider = CGDataProvider(dataInfo: nil,
                                  data: data,
                                  size: dataSize,
                                  releaseData: { _, bytes, _ in bytes.deallocate() })!
    let colorSpace = CGColorSpaceCreateDeviceRGB()
    let image = CGImage(width: w,
                        height: h,
                        bitsPerComponent: 8,
                        bitsPerPixel: 32,
                        bytesPerRow: w * 4,
                        space: colorSpace,
                        bitmapInfo: .init(rawValue: CGImageAlphaInfo.noneSkipLast.rawValue),
                        provider: provider,
                        decode: nil,
                        shouldInterpolate: true,
                        intent: .defaultIntent)!
    return image
}
extension MTLTexture {
    /// Reads the texture contents back as a CGImage (bgra8Unorm only;
    /// any other pixel format is a fatal error).
    var cgImage: CGImage {
        let dataSize = width * height * 4
        // NOTE(review): ptr is handed to createImage and not deallocated
        // here — its lifetime depends on createImage's data provider;
        // verify it is released there.
        let ptr = UnsafeMutablePointer<UInt8>.allocate(capacity: dataSize)
        switch pixelFormat {
        case .bgra8Unorm:
            getBytes(ptr, bytesPerRow: width * 4, from: MTLRegionMake2D(0, 0, width, height), mipmapLevel: 0)
            // Swap the B and R bytes of each pixel so the buffer is RGBA,
            // matching the CGImage bitmap info used by createImage.
            for i in 0 ..< (width * height) {
                swap(&ptr[4 * i], &ptr[4 * i + 2])
            }
        default:
            fatalError()
        }
        return createImage(data: ptr, w: width, h: height)
    }

    /// True when every byte of the texture is zero (bgra8Unorm only;
    /// any other pixel format is a fatal error).
    var isBlack: Bool {
        let dataSize = width * height * 4
        let ptr = UnsafeMutablePointer<UInt8>.allocate(capacity: dataSize)
        defer {
            ptr.deallocate()
        }
        switch pixelFormat {
        case .bgra8Unorm:
            getBytes(ptr, bytesPerRow: width * 4, from: MTLRegionMake2D(0, 0, width, height), mipmapLevel: 0)
        default:
            fatalError()
        }
        for x in 0 ..< dataSize {
            if ptr[x] != 0 {
                return false
            }
        }
        return true
    }
}
================================================
FILE: Tests/WaveformTests/WaveformTests.swift
================================================
// Copyright AudioKit. All Rights Reserved. Revision History at http://github.com/AudioKit/Waveform/
import AVFoundation
import CoreFoundation
import CoreGraphics
import Metal
import MetalKit
@testable import Waveform
import XCTest
final class WaveformTests: XCTestCase {
    // May be nil on machines without Metal support; tests bail out early.
    let device = MTLCreateSystemDefaultDevice()
    var queue: MTLCommandQueue!
    var texture: MTLTexture!
    var pass: MTLRenderPassDescriptor!

    // Builds an offscreen 512x512 BGRA render target and a render pass
    // that clears it before each test.
    override func setUp() {
        guard let device = device else { return }
        queue = device.makeCommandQueue()!

        let w = 512
        let h = 512
        let textureDesc = MTLTextureDescriptor()
        textureDesc.pixelFormat = .bgra8Unorm
        textureDesc.width = w
        textureDesc.height = h
        textureDesc.usage = [.renderTarget, .shaderRead, .shaderWrite]
        texture = device.makeTexture(descriptor: textureDesc)
        XCTAssertNotNil(texture)

        pass = MTLRenderPassDescriptor()
        pass.colorAttachments[0].texture = texture
        pass.colorAttachments[0].storeAction = .store
        pass.colorAttachments[0].loadAction = .clear
    }

    // Writes a CGImage as PNG to the given URL (macOS only; no-op elsewhere).
    func writeCGImage(image: CGImage, url: CFURL) {
        #if os(macOS)
        let dest = CGImageDestinationCreateWithURL(url, kUTTypePNG, 1, nil)!
        CGImageDestinationAddImage(dest, image, nil)
        assert(CGImageDestinationFinalize(dest))
        #endif
    }

    // Saves the texture to a temporary PNG so failures can be inspected.
    func showTexture(texture: MTLTexture, name: String) {
        let tmpURL = FileManager.default.temporaryDirectory.appendingPathComponent(name)
        print("saving to \(tmpURL)")
        writeCGImage(image: texture.cgImage, url: tmpURL as CFURL)
    }

    // Renders the samples with the library's Renderer and asserts that
    // something was actually drawn (the target is not all black).
    func render(samples: [Float]) async {
        guard let device = device else { return }
        let renderer = Renderer(device: device)
        await renderer.set(samples: SampleBuffer(samples: samples), start: 0, length: samples.count)

        let commandBuffer = queue.makeCommandBuffer()!
        renderer.encode(to: commandBuffer, pass: pass, width: 512)

        #if os(macOS)
        // Managed-storage textures must be synchronized before CPU readback.
        let blit = commandBuffer.makeBlitCommandEncoder()!
        blit.synchronize(resource: texture)
        blit.endEncoding()
        #endif

        commandBuffer.commit()
        commandBuffer.waitUntilCompleted()
        XCTAssertFalse(texture.isBlack)
        showTexture(texture: texture, name: "Waveform.png")
    }

    // End-to-end: load the bundled AIFF and render its left channel.
    func testRenderBeat() async throws {
        guard let url = Bundle.module.url(forResource: "beat", withExtension: "aiff") else {
            XCTFail()
            return
        }
        let file = try! AVAudioFile(forReading: url)
        let stereo = file.floatChannelData()!
        await render(samples: stereo[0])
    }
}
================================================
FILE: Waveform.playground/Contents.swift
================================================
import PlaygroundSupport
import SwiftUI
import Waveform
/// Playground demo: a noisy single-cycle sine wave rendered as a waveform.
struct WaveformDemoView: View {
    /// One cycle of a sine wave with a little random noise mixed in.
    var samples: [Float] {
        let size = 1000
        return (0 ..< size).map { i in
            let sine = sin(Float(i * 2) * .pi / Float(size)) * 0.9
            return sine + 0.1 * Float.random(in: -1 ... 1)
        }
    }

    @State var start = 0.0
    @State var length = 1.0

    let formatter = NumberFormatter()

    var body: some View {
        Waveform(samples: SampleBuffer(samples: samples),
                 start: 0,
                 length: 1000)
            .padding()
    }
}

PlaygroundPage.current.setLiveView(WaveformDemoView().frame(width: 1100, height: 500))
PlaygroundPage.current.needsIndefiniteExecution = true
================================================
FILE: Waveform.playground/contents.xcplayground
================================================
<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
<playground version='5.0' target-platform='macos' display-mode='rendered' buildActiveScheme='true' executeOnSourceChanges='true' importAppTypes='true'>
<timeline fileName='timeline.xctimeline'/>
</playground>
gitextract_iqm9pz60/
├── .github/
│ └── workflows/
│ └── tests.yml
├── .gitignore
├── .spi.yml
├── .swiftpm/
│ └── xcode/
│ ├── package.xcworkspace/
│ │ └── contents.xcworkspacedata
│ └── xcshareddata/
│ └── xcschemes/
│ └── Waveform.xcscheme
├── Demo/
│ ├── WaveformDemo/
│ │ ├── Assets.xcassets/
│ │ │ ├── AccentColor.colorset/
│ │ │ │ └── Contents.json
│ │ │ ├── AppIcon.appiconset/
│ │ │ │ └── Contents.json
│ │ │ └── Contents.json
│ │ ├── ContentView.swift
│ │ ├── MinimapView.swift
│ │ ├── Preview Content/
│ │ │ └── Preview Assets.xcassets/
│ │ │ └── Contents.json
│ │ ├── WaveformDemo.entitlements
│ │ └── WaveformDemoApp.swift
│ └── WaveformDemo.xcodeproj/
│ ├── project.pbxproj
│ └── project.xcworkspace/
│ ├── contents.xcworkspacedata
│ └── xcshareddata/
│ └── IDEWorkspaceChecks.plist
├── LICENSE
├── Package.swift
├── README.md
├── Sources/
│ └── Waveform/
│ ├── AVAudio+FloatData.swift
│ ├── Helpers.swift
│ ├── Renderer.swift
│ ├── SampleBuffer.swift
│ ├── Waveform.docc/
│ │ └── Waveform.md
│ ├── Waveform.metal
│ └── Waveform.swift
├── Tests/
│ └── WaveformTests/
│ ├── MTLTexture+Image.swift
│ ├── WaveformTests.swift
│ └── beat.aiff
└── Waveform.playground/
├── Contents.swift
└── contents.xcplayground
Condensed preview — 31 files, each showing path, character count, and a content snippet. Download the .json file or copy for the full structured content (62K chars).
[
{
"path": ".github/workflows/tests.yml",
"chars": 1032,
"preview": "name: Tests\n\non:\n workflow_dispatch:\n push:\n branches: [main]\n pull_request:\n branches: [main]\n\njobs:\n build:\n"
},
{
"path": ".gitignore",
"chars": 2181,
"preview": "# Xcode\n#\n# gitignore contributors: remember to update Global/Xcode.gitignore, Objective-C.gitignore & Swift.gitignore\n\n"
},
{
"path": ".spi.yml",
"chars": 70,
"preview": "version: 1\nbuilder:\n configs:\n - documentation_targets: [Waveform]"
},
{
"path": ".swiftpm/xcode/package.xcworkspace/contents.xcworkspacedata",
"chars": 135,
"preview": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<Workspace\n version = \"1.0\">\n <FileRef\n location = \"self:\">\n </FileRef"
},
{
"path": ".swiftpm/xcode/xcshareddata/xcschemes/Waveform.xcscheme",
"chars": 4643,
"preview": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<Scheme\n LastUpgradeVersion = \"1400\"\n version = \"1.3\">\n <BuildAction\n "
},
{
"path": "Demo/WaveformDemo/Assets.xcassets/AccentColor.colorset/Contents.json",
"chars": 123,
"preview": "{\n \"colors\" : [\n {\n \"idiom\" : \"universal\"\n }\n ],\n \"info\" : {\n \"author\" : \"xcode\",\n \"version\" : 1\n }"
},
{
"path": "Demo/WaveformDemo/Assets.xcassets/AppIcon.appiconset/Contents.json",
"chars": 999,
"preview": "{\n \"images\" : [\n {\n \"idiom\" : \"universal\",\n \"platform\" : \"ios\",\n \"size\" : \"1024x1024\"\n },\n {\n "
},
{
"path": "Demo/WaveformDemo/Assets.xcassets/Contents.json",
"chars": 63,
"preview": "{\n \"info\" : {\n \"author\" : \"xcode\",\n \"version\" : 1\n }\n}\n"
},
{
"path": "Demo/WaveformDemo/ContentView.swift",
"chars": 1394,
"preview": "// Copyright AudioKit. All Rights Reserved. Revision History at http://github.com/AudioKit/Waveform/\n\nimport AVFoundatio"
},
{
"path": "Demo/WaveformDemo/MinimapView.swift",
"chars": 1922,
"preview": "// Copyright AudioKit. All Rights Reserved. Revision History at http://github.com/AudioKit/Waveform/\n\nimport AVFoundatio"
},
{
"path": "Demo/WaveformDemo/Preview Content/Preview Assets.xcassets/Contents.json",
"chars": 63,
"preview": "{\n \"info\" : {\n \"author\" : \"xcode\",\n \"version\" : 1\n }\n}\n"
},
{
"path": "Demo/WaveformDemo/WaveformDemo.entitlements",
"chars": 322,
"preview": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<!DOCTYPE plist PUBLIC \"-//Apple//DTD PLIST 1.0//EN\" \"http://www.apple.com/DTDs/P"
},
{
"path": "Demo/WaveformDemo/WaveformDemoApp.swift",
"chars": 145,
"preview": "import SwiftUI\n\n@main\nstruct WaveformDemoApp: App {\n var body: some Scene {\n WindowGroup {\n Content"
},
{
"path": "Demo/WaveformDemo.xcodeproj/project.pbxproj",
"chars": 17034,
"preview": "// !$*UTF8*$!\n{\n\tarchiveVersion = 1;\n\tclasses = {\n\t};\n\tobjectVersion = 56;\n\tobjects = {\n\n/* Begin PBXBuildFile section *"
},
{
"path": "Demo/WaveformDemo.xcodeproj/project.xcworkspace/contents.xcworkspacedata",
"chars": 135,
"preview": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<Workspace\n version = \"1.0\">\n <FileRef\n location = \"self:\">\n </FileRef"
},
{
"path": "Demo/WaveformDemo.xcodeproj/project.xcworkspace/xcshareddata/IDEWorkspaceChecks.plist",
"chars": 238,
"preview": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<!DOCTYPE plist PUBLIC \"-//Apple//DTD PLIST 1.0//EN\" \"http://www.apple.com/DTDs/P"
},
{
"path": "LICENSE",
"chars": 1065,
"preview": "MIT License\n\nCopyright (c) 2022 AudioKit\n\nPermission is hereby granted, free of charge, to any person obtaining a copy\no"
},
{
"path": "Package.swift",
"chars": 413,
"preview": "// swift-tools-version:5.3\n\nimport PackageDescription\n\nlet package = Package(\n name: \"Waveform\",\n platforms: [.mac"
},
{
"path": "README.md",
"chars": 313,
"preview": "# Waveform\n\nGPU accelerated SwiftUI waveform view\n\n\n\n"
},
{
"path": "Sources/Waveform/AVAudio+FloatData.swift",
"chars": 2171,
"preview": "// Copyright AudioKit. All Rights Reserved. Revision History at http://github.com/AudioKit/Waveform/\n\nimport Accelerate\n"
},
{
"path": "Sources/Waveform/Helpers.swift",
"chars": 2831,
"preview": "// Copyright AudioKit. All Rights Reserved. Revision History at http://github.com/AudioKit/Waveform/\n\nimport Foundation\n"
},
{
"path": "Sources/Waveform/Renderer.swift",
"chars": 6024,
"preview": "// Copyright AudioKit. All Rights Reserved. Revision History at http://github.com/AudioKit/Waveform/\n\nimport Foundation\n"
},
{
"path": "Sources/Waveform/SampleBuffer.swift",
"chars": 480,
"preview": "// Copyright AudioKit. All Rights Reserved. Revision History at http://github.com/AudioKit/Waveform/\n\nimport Foundation\n"
},
{
"path": "Sources/Waveform/Waveform.docc/Waveform.md",
"chars": 294,
"preview": "# ``Waveform``\n\nGPU accelerated SwiftUI waveform view\n\n## Overview\n\nCode is hosted on Github: [](https://github.com/Audi"
},
{
"path": "Sources/Waveform/Waveform.metal",
"chars": 2000,
"preview": "// Copyright AudioKit. All Rights Reserved. Revision History at http://github.com/AudioKit/Waveform/\n\n#include <metal_st"
},
{
"path": "Sources/Waveform/Waveform.swift",
"chars": 4454,
"preview": "// Copyright AudioKit. All Rights Reserved. Revision History at http://github.com/AudioKit/Waveform/\n\nimport AVFoundatio"
},
{
"path": "Tests/WaveformTests/MTLTexture+Image.swift",
"chars": 2084,
"preview": "// Copyright AudioKit. All Rights Reserved. Revision History at http://github.com/AudioKit/Waveform/\n\nimport CoreGraphic"
},
{
"path": "Tests/WaveformTests/WaveformTests.swift",
"chars": 2703,
"preview": "// Copyright AudioKit. All Rights Reserved. Revision History at http://github.com/AudioKit/Waveform/\n\nimport AVFoundatio"
},
{
"path": "Waveform.playground/Contents.swift",
"chars": 771,
"preview": "import PlaygroundSupport\nimport SwiftUI\nimport Waveform\n\nstruct WaveformDemoView: View {\n var samples: [Float] {\n "
},
{
"path": "Waveform.playground/contents.xcplayground",
"chars": 268,
"preview": "<?xml version=\"1.0\" encoding=\"UTF-8\" standalone=\"yes\"?>\n<playground version='5.0' target-platform='macos' display-mode='"
}
]
// ... and 1 more files (download for full content)
About this extraction
This page contains the full source code of the AudioKit/Waveform GitHub repository, extracted and formatted as plain text for AI agents and large language models (LLMs). The extraction includes 31 files (55.0 KB), approximately 15.8k tokens. Use this with OpenClaw, Claude, ChatGPT, Cursor, Windsurf, or any other AI tool that accepts text input. You can copy the full output to your clipboard or download it as a .txt file.
Extracted by GitExtract — free GitHub repo to text converter for AI. Built by Nikandr Surkov.