Repository: jc211/NeRFCapture
Branch: main
Commit: 312cb01efd5b
Files: 30
Total size: 84.1 KB
Directory structure:
gitextract_wykh3jz3/
├── .gitignore
├── CITATION.cff
├── LICENCE
├── NeRFCapture/
│ ├── AppDelegate.swift
│ ├── Assets.xcassets/
│ │ ├── AccentColor.colorset/
│ │ │ └── Contents.json
│ │ ├── AppIcon.appiconset/
│ │ │ └── Contents.json
│ │ ├── Contents.json
│ │ └── NeRFCaptureSplash.imageset/
│ │ └── Contents.json
│ ├── DDSModel/
│ │ ├── NeRFCaptureData.c
│ │ ├── NeRFCaptureData.h
│ │ └── NeRFCaptureData.idl
│ ├── DDSWriter.swift
│ ├── DatasetWriter.swift
│ ├── Info.plist
│ ├── Models/
│ │ ├── AppState.swift
│ │ └── Manifest.swift
│ ├── Networking/
│ │ └── CycloneDDS-Bridging-Header.h
│ ├── Preview Content/
│ │ └── Preview Assets.xcassets/
│ │ └── Contents.json
│ ├── Shaders.metal
│ ├── Utils.swift
│ ├── ViewModels/
│ │ └── ARViewModel.swift
│ └── Views/
│ ├── ARViewContainer.swift
│ └── ContentView.swift
├── NeRFCapture.xcodeproj/
│ ├── project.pbxproj
│ ├── project.xcworkspace/
│ │ ├── contents.xcworkspacedata
│ │ └── xcshareddata/
│ │ ├── IDEWorkspaceChecks.plist
│ │ ├── WorkspaceSettings.xcsettings
│ │ └── swiftpm/
│ │ └── Package.resolved
│ └── xcshareddata/
│ └── xcschemes/
│ └── NeRFCapture.xcscheme
└── README.md
================================================
FILE CONTENTS
================================================
================================================
FILE: .gitignore
================================================
## User settings
xcuserdata/
## compatibility with Xcode 8 and earlier (ignoring not required starting Xcode 9)
*.xcscmblueprint
*.xccheckout
## compatibility with Xcode 3 and earlier (ignoring not required starting Xcode 4)
build/
DerivedData/
*.moved-aside
*.pbxuser
!default.pbxuser
*.mode1v3
!default.mode1v3
*.mode2v3
!default.mode2v3
*.perspectivev3
!default.perspectivev3
## Obj-C/Swift specific
*.hmap
## App packaging
*.ipa
*.dSYM.zip
*.dSYM
## Playgrounds
timeline.xctimeline
playground.xcworkspace
# Swift Package Manager
#
# Add this line if you want to avoid checking in source code from Swift Package Manager dependencies.
# Packages/
# Package.pins
# Package.resolved
# *.xcodeproj
#
# Xcode automatically generates this directory with a .xcworkspacedata file and xcuserdata
# hence it is not needed unless you have added a package configuration file to your project
# .swiftpm
.build/
# CocoaPods
#
# We recommend against adding the Pods directory to your .gitignore. However
# you should judge for yourself, the pros and cons are mentioned at:
# https://guides.cocoapods.org/using/using-cocoapods.html#should-i-check-the-pods-directory-into-source-control
#
# Pods/
#
# Add this line if you want to avoid checking in source code from the Xcode workspace
# *.xcworkspace
# Carthage
#
# Add this line if you want to avoid checking in source code from Carthage dependencies.
# Carthage/Checkouts
Carthage/Build/
# Accio dependency management
Dependencies/
.accio/
# fastlane
#
# It is recommended to not store the screenshots in the git repo.
# Instead, use fastlane to re-generate the screenshots whenever they are needed.
# For more information about the recommended setup visit:
# https://docs.fastlane.tools/best-practices/source-control/#source-control
fastlane/report.xml
fastlane/Preview.html
fastlane/screenshots/**/*.png
fastlane/test_output
# Code Injection
#
# After new code Injection tools there's a generated folder /iOSInjectionProject
# https://github.com/johnno1962/injectionforxcode
iOSInjectionProject/
================================================
FILE: CITATION.cff
================================================
cff-version: 1.2.0
message: "If you use this software, please cite it as below."
authors:
- family-names: "Abou-Chakra"
given-names: "Jad"
orcid: "https://orcid.org/0000-0002-9122-3132"
title: "NeRFCapture: A tool for streaming posed images"
version: 1.0.0
url: "https://github.com/jc211/NeRFCapture"
================================================
FILE: LICENCE
================================================
MIT License
Copyright (c) 2023 Jad Abou-Chakra
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
================================================
FILE: NeRFCapture/AppDelegate.swift
================================================
//
// AppDelegate.swift
// NeRFCapture
//
// Created by Jad Abou-Chakra on 13/7/2022.
//
import UIKit
import SwiftUI
@main
/// UIKit application delegate: owns the long-lived writers (dataset + DDS),
/// builds the SwiftUI root view, and persists `AppSettings` across launches.
class AppDelegate: UIResponder, UIApplicationDelegate {
    var window: UIWindow?
    var view: ContentView?
    var appSettings = AppSettings()
    var datasetWriter = DatasetWriter()
    var ddsWriter = DDSWriter()

    func application(_ application: UIApplication, didFinishLaunchingWithOptions launchOptions: [UIApplication.LaunchOptionsKey: Any]?) -> Bool {
        // Restore persisted settings before building the UI so the initial
        // state reflects the previous session. (Fix: `loadSettings()` existed
        // but was never called, so saved settings were silently ignored.)
        loadSettings()
        // Create the SwiftUI view that provides the window contents.
        let viewModel = ARViewModel(datasetWriter: datasetWriter, ddsWriter: ddsWriter)
        let contentView = ContentView(viewModel: viewModel)
        // Use a UIHostingController as window root view controller.
        let window = UIWindow(frame: UIScreen.main.bounds)
        window.rootViewController = UIHostingController(rootView: contentView)
        self.window = window
        self.view = contentView
        window.makeKeyAndVisible()
        return true
    }

    func applicationWillResignActive(_ application: UIApplication) {
        // App is becoming inactive (phone call, app switcher, quitting):
        // drop any half-captured session and persist settings.
        datasetWriter.clean()
        saveSettings()
    }

    func applicationDidEnterBackground(_ application: UIApplication) {
        // Same cleanup as resign-active, in case the app is terminated later.
        datasetWriter.clean()
        saveSettings()
    }

    func applicationWillEnterForeground(_ application: UIApplication) {
        // Called as part of the transition from the background to the active state.
    }

    func applicationDidBecomeActive(_ application: UIApplication) {
        // Restart any tasks that were paused while the application was inactive.
    }

    /// Persists `appSettings` to UserDefaults as JSON under the key "appSettings".
    func saveSettings() {
        let encoder = JSONEncoder()
        if let data = try? encoder.encode(appSettings) {
            UserDefaults.standard.set(data, forKey: "appSettings")
        }
    }

    /// Restores `appSettings` from UserDefaults; falls back to defaults when
    /// no data is stored or decoding fails (e.g. after a schema change).
    func loadSettings() {
        if let data = UserDefaults.standard.data(forKey: "appSettings") {
            do {
                let decoder = JSONDecoder()
                appSettings = try decoder.decode(AppSettings.self, from: data)
            } catch {
                appSettings = AppSettings()
            }
        }
    }
}
================================================
FILE: NeRFCapture/Assets.xcassets/AccentColor.colorset/Contents.json
================================================
{
"colors" : [
{
"color" : {
"color-space" : "srgb",
"components" : {
"alpha" : "1.000",
"blue" : "0xB0",
"green" : "0x11",
"red" : "0xAC"
}
},
"idiom" : "universal"
}
],
"info" : {
"author" : "xcode",
"version" : 1
}
}
================================================
FILE: NeRFCapture/Assets.xcassets/AppIcon.appiconset/Contents.json
================================================
{
"images" : [
{
"filename" : "Icon-40.png",
"idiom" : "iphone",
"scale" : "2x",
"size" : "20x20"
},
{
"filename" : "Icon-60.png",
"idiom" : "iphone",
"scale" : "3x",
"size" : "20x20"
},
{
"filename" : "Icon-29.png",
"idiom" : "iphone",
"scale" : "1x",
"size" : "29x29"
},
{
"filename" : "Icon-58.png",
"idiom" : "iphone",
"scale" : "2x",
"size" : "29x29"
},
{
"filename" : "Icon-87.png",
"idiom" : "iphone",
"scale" : "3x",
"size" : "29x29"
},
{
"filename" : "Icon-80.png",
"idiom" : "iphone",
"scale" : "2x",
"size" : "40x40"
},
{
"filename" : "Icon-120.png",
"idiom" : "iphone",
"scale" : "3x",
"size" : "40x40"
},
{
"filename" : "Icon-57.png",
"idiom" : "iphone",
"scale" : "1x",
"size" : "57x57"
},
{
"filename" : "Icon-114.png",
"idiom" : "iphone",
"scale" : "2x",
"size" : "57x57"
},
{
"filename" : "Icon-120 1.png",
"idiom" : "iphone",
"scale" : "2x",
"size" : "60x60"
},
{
"filename" : "Icon-180.png",
"idiom" : "iphone",
"scale" : "3x",
"size" : "60x60"
},
{
"filename" : "Icon-20.png",
"idiom" : "ipad",
"scale" : "1x",
"size" : "20x20"
},
{
"filename" : "Icon-40 2.png",
"idiom" : "ipad",
"scale" : "2x",
"size" : "20x20"
},
{
"filename" : "Icon-29 1.png",
"idiom" : "ipad",
"scale" : "1x",
"size" : "29x29"
},
{
"filename" : "Icon-58 1.png",
"idiom" : "ipad",
"scale" : "2x",
"size" : "29x29"
},
{
"filename" : "Icon-40 1.png",
"idiom" : "ipad",
"scale" : "1x",
"size" : "40x40"
},
{
"filename" : "Icon-80 1.png",
"idiom" : "ipad",
"scale" : "2x",
"size" : "40x40"
},
{
"filename" : "Icon-76.png",
"idiom" : "ipad",
"scale" : "1x",
"size" : "76x76"
},
{
"filename" : "Icon-152.png",
"idiom" : "ipad",
"scale" : "2x",
"size" : "76x76"
},
{
"filename" : "Icon-167.png",
"idiom" : "ipad",
"scale" : "2x",
"size" : "83.5x83.5"
},
{
"filename" : "Icon-1024.png",
"idiom" : "ios-marketing",
"scale" : "1x",
"size" : "1024x1024"
},
{
"filename" : "Icon-16.png",
"idiom" : "mac",
"scale" : "1x",
"size" : "16x16"
},
{
"filename" : "Icon-32.png",
"idiom" : "mac",
"scale" : "1x",
"size" : "32x32"
},
{
"filename" : "Icon-128.png",
"idiom" : "mac",
"scale" : "1x",
"size" : "128x128"
},
{
"filename" : "Icon-256.png",
"idiom" : "mac",
"scale" : "1x",
"size" : "256x256"
},
{
"filename" : "Icon-512.png",
"idiom" : "mac",
"scale" : "1x",
"size" : "512x512"
}
],
"info" : {
"author" : "xcode",
"version" : 1
}
}
================================================
FILE: NeRFCapture/Assets.xcassets/Contents.json
================================================
{
"info" : {
"author" : "xcode",
"version" : 1
}
}
================================================
FILE: NeRFCapture/Assets.xcassets/NeRFCaptureSplash.imageset/Contents.json
================================================
{
"images" : [
{
"filename" : "NeRFCaptureSplash 2.png",
"idiom" : "universal",
"scale" : "1x"
},
{
"filename" : "NeRFCaptureSplash 1.png",
"idiom" : "universal",
"scale" : "2x"
},
{
"filename" : "NeRFCaptureSplash.png",
"idiom" : "universal",
"scale" : "3x"
}
],
"info" : {
"author" : "xcode",
"version" : 1
}
}
================================================
FILE: NeRFCapture/DDSModel/NeRFCaptureData.c
================================================
/****************************************************************
Generated by Eclipse Cyclone DDS IDL to C Translator
File name: NeRFCaptureData.c
Cyclone DDS: V0.11.0
*****************************************************************/
#include "NeRFCaptureData.h"
/* CDR (de)serializer instruction stream for NeRFCaptureData::NeRFCaptureFrame.
 * Generated by the Cyclone DDS IDL compiler -- regenerate from
 * NeRFCaptureData.idl rather than editing by hand. Each DDS_OP_ADR entry
 * pairs an opcode word with the member's offsetof() inside the C struct. */
static const uint32_t NeRFCaptureData_NeRFCaptureFrame_ops [] =
{
/* NeRFCaptureFrame */
DDS_OP_ADR | DDS_OP_FLAG_KEY | DDS_OP_FLAG_MU | DDS_OP_TYPE_4BY, offsetof (NeRFCaptureData_NeRFCaptureFrame, id),
DDS_OP_ADR | DDS_OP_TYPE_8BY | DDS_OP_FLAG_FP, offsetof (NeRFCaptureData_NeRFCaptureFrame, timestamp),
DDS_OP_ADR | DDS_OP_TYPE_4BY | DDS_OP_FLAG_FP, offsetof (NeRFCaptureData_NeRFCaptureFrame, fl_x),
DDS_OP_ADR | DDS_OP_TYPE_4BY | DDS_OP_FLAG_FP, offsetof (NeRFCaptureData_NeRFCaptureFrame, fl_y),
DDS_OP_ADR | DDS_OP_TYPE_4BY | DDS_OP_FLAG_FP, offsetof (NeRFCaptureData_NeRFCaptureFrame, cx),
DDS_OP_ADR | DDS_OP_TYPE_4BY | DDS_OP_FLAG_FP, offsetof (NeRFCaptureData_NeRFCaptureFrame, cy),
DDS_OP_ADR | DDS_OP_TYPE_ARR | DDS_OP_SUBTYPE_4BY | DDS_OP_FLAG_FP, offsetof (NeRFCaptureData_NeRFCaptureFrame, transform_matrix), 16u,
DDS_OP_ADR | DDS_OP_TYPE_4BY, offsetof (NeRFCaptureData_NeRFCaptureFrame, width),
DDS_OP_ADR | DDS_OP_TYPE_4BY, offsetof (NeRFCaptureData_NeRFCaptureFrame, height),
DDS_OP_ADR | DDS_OP_TYPE_SEQ | DDS_OP_SUBTYPE_1BY, offsetof (NeRFCaptureData_NeRFCaptureFrame, image),
DDS_OP_ADR | DDS_OP_TYPE_BLN, offsetof (NeRFCaptureData_NeRFCaptureFrame, has_depth),
DDS_OP_ADR | DDS_OP_TYPE_4BY, offsetof (NeRFCaptureData_NeRFCaptureFrame, depth_width),
DDS_OP_ADR | DDS_OP_TYPE_4BY, offsetof (NeRFCaptureData_NeRFCaptureFrame, depth_height),
DDS_OP_ADR | DDS_OP_TYPE_4BY | DDS_OP_FLAG_FP, offsetof (NeRFCaptureData_NeRFCaptureFrame, depth_scale),
DDS_OP_ADR | DDS_OP_TYPE_SEQ | DDS_OP_SUBTYPE_1BY, offsetof (NeRFCaptureData_NeRFCaptureFrame, depth_image),
DDS_OP_RTS,
/* key: id */
DDS_OP_KOF | 1, 0u /* order: 0 */
};
/* Key descriptor: the topic's single key is the 32-bit `id` member. */
static const dds_key_descriptor_t NeRFCaptureData_NeRFCaptureFrame_keys[1] =
{
{ "id", 32, 0 }
};
/* Type Information:
[MINIMAL 58c15e051fee500969361b5dfc44] (#deps: 0)
[COMPLETE 29f4d038609e4e7e64774dbdf9fc] (#deps: 0)
*/
#define TYPE_INFO_CDR_NeRFCaptureData_NeRFCaptureFrame (unsigned char []){ \
0x60, 0x00, 0x00, 0x00, 0x01, 0x10, 0x00, 0x40, 0x28, 0x00, 0x00, 0x00, 0x24, 0x00, 0x00, 0x00, \
0x14, 0x00, 0x00, 0x00, 0xf1, 0x58, 0xc1, 0x5e, 0x05, 0x1f, 0xee, 0x50, 0x09, 0x69, 0x36, 0x1b, \
0x5d, 0xfc, 0x44, 0x00, 0x1c, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, \
0x00, 0x00, 0x00, 0x00, 0x02, 0x10, 0x00, 0x40, 0x28, 0x00, 0x00, 0x00, 0x24, 0x00, 0x00, 0x00, \
0x14, 0x00, 0x00, 0x00, 0xf2, 0x29, 0xf4, 0xd0, 0x38, 0x60, 0x9e, 0x4e, 0x7e, 0x64, 0x77, 0x4d, \
0xbd, 0xf9, 0xfc, 0x00, 0xf2, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, \
0x00, 0x00, 0x00, 0x00\
}
#define TYPE_INFO_CDR_SZ_NeRFCaptureData_NeRFCaptureFrame 100u
#define TYPE_MAP_CDR_NeRFCaptureData_NeRFCaptureFrame (unsigned char []){ \
0x30, 0x01, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0xf1, 0x58, 0xc1, 0x5e, 0x05, 0x1f, 0xee, 0x50, \
0x09, 0x69, 0x36, 0x1b, 0x5d, 0xfc, 0x44, 0x00, 0x18, 0x01, 0x00, 0x00, 0xf1, 0x51, 0x01, 0x00, \
0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x08, 0x01, 0x00, 0x00, 0x0f, 0x00, 0x00, 0x00, \
0x0b, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x31, 0x00, 0x07, 0xb8, 0x0b, 0xb7, 0x74, 0x00, \
0x0b, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x01, 0x00, 0x0a, 0xd7, 0xe6, 0xd5, 0x5b, 0x00, \
0x0b, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x01, 0x00, 0x09, 0xb1, 0xf0, 0x2e, 0x42, 0x00, \
0x0b, 0x00, 0x00, 0x00, 0x03, 0x00, 0x00, 0x00, 0x01, 0x00, 0x09, 0x0f, 0xe4, 0xcf, 0x54, 0x00, \
0x0b, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x01, 0x00, 0x09, 0x0b, 0xdf, 0xf8, 0x09, 0x00, \
0x0b, 0x00, 0x00, 0x00, 0x05, 0x00, 0x00, 0x00, 0x01, 0x00, 0x09, 0x47, 0x1c, 0x1f, 0x3f, 0x00, \
0x16, 0x00, 0x00, 0x00, 0x06, 0x00, 0x00, 0x00, 0x01, 0x00, 0x90, 0xf3, 0x01, 0x00, 0x00, 0x00, \
0x01, 0x00, 0x00, 0x00, 0x10, 0x09, 0x0e, 0x1f, 0x23, 0x7b, 0x00, 0x00, 0x0b, 0x00, 0x00, 0x00, \
0x07, 0x00, 0x00, 0x00, 0x01, 0x00, 0x07, 0xea, 0xae, 0x26, 0xa6, 0x00, 0x0b, 0x00, 0x00, 0x00, \
0x08, 0x00, 0x00, 0x00, 0x01, 0x00, 0x07, 0xb4, 0x35, 0xe2, 0x27, 0x00, 0x10, 0x00, 0x00, 0x00, \
0x09, 0x00, 0x00, 0x00, 0x01, 0x00, 0x80, 0xf3, 0x01, 0x00, 0x00, 0x02, 0x78, 0x80, 0x5a, 0x22, \
0x0b, 0x00, 0x00, 0x00, 0x0a, 0x00, 0x00, 0x00, 0x01, 0x00, 0x01, 0x71, 0x26, 0x29, 0xb6, 0x00, \
0x0b, 0x00, 0x00, 0x00, 0x0b, 0x00, 0x00, 0x00, 0x01, 0x00, 0x07, 0xb4, 0x1f, 0x76, 0xe8, 0x00, \
0x0b, 0x00, 0x00, 0x00, 0x0c, 0x00, 0x00, 0x00, 0x01, 0x00, 0x07, 0xd7, 0x32, 0xcd, 0xd0, 0x00, \
0x0b, 0x00, 0x00, 0x00, 0x0d, 0x00, 0x00, 0x00, 0x01, 0x00, 0x09, 0x0e, 0x97, 0xe6, 0xea, 0x00, \
0x10, 0x00, 0x00, 0x00, 0x0e, 0x00, 0x00, 0x00, 0x01, 0x00, 0x80, 0xf3, 0x01, 0x00, 0x00, 0x02, \
0x59, 0xf4, 0x30, 0x3f, 0x06, 0x02, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0xf2, 0x29, 0xf4, 0xd0, \
0x38, 0x60, 0x9e, 0x4e, 0x7e, 0x64, 0x77, 0x4d, 0xbd, 0xf9, 0xfc, 0x00, 0xee, 0x01, 0x00, 0x00, \
0xf2, 0x51, 0x01, 0x00, 0x2a, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x22, 0x00, 0x00, 0x00, \
0x4e, 0x65, 0x52, 0x46, 0x43, 0x61, 0x70, 0x74, 0x75, 0x72, 0x65, 0x44, 0x61, 0x74, 0x61, 0x3a, \
0x3a, 0x4e, 0x65, 0x52, 0x46, 0x43, 0x61, 0x70, 0x74, 0x75, 0x72, 0x65, 0x46, 0x72, 0x61, 0x6d, \
0x65, 0x00, 0x00, 0x00, 0xb6, 0x01, 0x00, 0x00, 0x0f, 0x00, 0x00, 0x00, 0x11, 0x00, 0x00, 0x00, \
0x00, 0x00, 0x00, 0x00, 0x31, 0x00, 0x07, 0x00, 0x03, 0x00, 0x00, 0x00, 0x69, 0x64, 0x00, 0x00, \
0x00, 0x00, 0x00, 0x00, 0x18, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x01, 0x00, 0x0a, 0x00, \
0x0a, 0x00, 0x00, 0x00, 0x74, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x00, 0x00, 0x00, \
0x13, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x01, 0x00, 0x09, 0x00, 0x05, 0x00, 0x00, 0x00, \
0x66, 0x6c, 0x5f, 0x78, 0x00, 0x00, 0x00, 0x00, 0x13, 0x00, 0x00, 0x00, 0x03, 0x00, 0x00, 0x00, \
0x01, 0x00, 0x09, 0x00, 0x05, 0x00, 0x00, 0x00, 0x66, 0x6c, 0x5f, 0x79, 0x00, 0x00, 0x00, 0x00, \
0x11, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x01, 0x00, 0x09, 0x00, 0x03, 0x00, 0x00, 0x00, \
0x63, 0x78, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x11, 0x00, 0x00, 0x00, 0x05, 0x00, 0x00, 0x00, \
0x01, 0x00, 0x09, 0x00, 0x03, 0x00, 0x00, 0x00, 0x63, 0x79, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, \
0x2b, 0x00, 0x00, 0x00, 0x06, 0x00, 0x00, 0x00, 0x01, 0x00, 0x90, 0xf3, 0x01, 0x00, 0x00, 0x00, \
0x01, 0x00, 0x00, 0x00, 0x10, 0x09, 0x00, 0x00, 0x11, 0x00, 0x00, 0x00, 0x74, 0x72, 0x61, 0x6e, \
0x73, 0x66, 0x6f, 0x72, 0x6d, 0x5f, 0x6d, 0x61, 0x74, 0x72, 0x69, 0x78, 0x00, 0x00, 0x00, 0x00, \
0x14, 0x00, 0x00, 0x00, 0x07, 0x00, 0x00, 0x00, 0x01, 0x00, 0x07, 0x00, 0x06, 0x00, 0x00, 0x00, \
0x77, 0x69, 0x64, 0x74, 0x68, 0x00, 0x00, 0x00, 0x15, 0x00, 0x00, 0x00, 0x08, 0x00, 0x00, 0x00, \
0x01, 0x00, 0x07, 0x00, 0x07, 0x00, 0x00, 0x00, 0x68, 0x65, 0x69, 0x67, 0x68, 0x74, 0x00, 0x00, \
0x00, 0x00, 0x00, 0x00, 0x18, 0x00, 0x00, 0x00, 0x09, 0x00, 0x00, 0x00, 0x01, 0x00, 0x80, 0xf3, \
0x01, 0x00, 0x00, 0x02, 0x06, 0x00, 0x00, 0x00, 0x69, 0x6d, 0x61, 0x67, 0x65, 0x00, 0x00, 0x00, \
0x18, 0x00, 0x00, 0x00, 0x0a, 0x00, 0x00, 0x00, 0x01, 0x00, 0x01, 0x00, 0x0a, 0x00, 0x00, 0x00, \
0x68, 0x61, 0x73, 0x5f, 0x64, 0x65, 0x70, 0x74, 0x68, 0x00, 0x00, 0x00, 0x1a, 0x00, 0x00, 0x00, \
0x0b, 0x00, 0x00, 0x00, 0x01, 0x00, 0x07, 0x00, 0x0c, 0x00, 0x00, 0x00, 0x64, 0x65, 0x70, 0x74, \
0x68, 0x5f, 0x77, 0x69, 0x64, 0x74, 0x68, 0x00, 0x00, 0x00, 0x00, 0x00, 0x1b, 0x00, 0x00, 0x00, \
0x0c, 0x00, 0x00, 0x00, 0x01, 0x00, 0x07, 0x00, 0x0d, 0x00, 0x00, 0x00, 0x64, 0x65, 0x70, 0x74, \
0x68, 0x5f, 0x68, 0x65, 0x69, 0x67, 0x68, 0x74, 0x00, 0x00, 0x00, 0x00, 0x1a, 0x00, 0x00, 0x00, \
0x0d, 0x00, 0x00, 0x00, 0x01, 0x00, 0x09, 0x00, 0x0c, 0x00, 0x00, 0x00, 0x64, 0x65, 0x70, 0x74, \
0x68, 0x5f, 0x73, 0x63, 0x61, 0x6c, 0x65, 0x00, 0x00, 0x00, 0x00, 0x00, 0x1e, 0x00, 0x00, 0x00, \
0x0e, 0x00, 0x00, 0x00, 0x01, 0x00, 0x80, 0xf3, 0x01, 0x00, 0x00, 0x02, 0x0c, 0x00, 0x00, 0x00, \
0x64, 0x65, 0x70, 0x74, 0x68, 0x5f, 0x69, 0x6d, 0x61, 0x67, 0x65, 0x00, 0x00, 0x00, 0x00, 0x00, \
0x22, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0xf2, 0x29, 0xf4, 0xd0, 0x38, 0x60, 0x9e, 0x4e, \
0x7e, 0x64, 0x77, 0x4d, 0xbd, 0xf9, 0xfc, 0xf1, 0x58, 0xc1, 0x5e, 0x05, 0x1f, 0xee, 0x50, 0x09, \
0x69, 0x36, 0x1b, 0x5d, 0xfc, 0x44\
}
#define TYPE_MAP_CDR_SZ_NeRFCaptureData_NeRFCaptureFrame 870u
/* Topic descriptor handed to dds_create_topic() (see DDSWriter.setupDDS()
 * in DDSWriter.swift). Generated code -- do not hand-edit; regenerate from
 * NeRFCaptureData.idl. */
const dds_topic_descriptor_t NeRFCaptureData_NeRFCaptureFrame_desc =
{
.m_size = sizeof (NeRFCaptureData_NeRFCaptureFrame),
.m_align = dds_alignof (NeRFCaptureData_NeRFCaptureFrame),
.m_flagset = DDS_TOPIC_FIXED_KEY | DDS_TOPIC_FIXED_KEY_XCDR2 | DDS_TOPIC_XTYPES_METADATA,
.m_nkeys = 1u,
.m_typename = "NeRFCaptureData::NeRFCaptureFrame",
.m_keys = NeRFCaptureData_NeRFCaptureFrame_keys,
.m_nops = 16,
.m_ops = NeRFCaptureData_NeRFCaptureFrame_ops,
.m_meta = "",
.type_information = { .data = TYPE_INFO_CDR_NeRFCaptureData_NeRFCaptureFrame, .sz = TYPE_INFO_CDR_SZ_NeRFCaptureData_NeRFCaptureFrame },
.type_mapping = { .data = TYPE_MAP_CDR_NeRFCaptureData_NeRFCaptureFrame, .sz = TYPE_MAP_CDR_SZ_NeRFCaptureData_NeRFCaptureFrame }
};
================================================
FILE: NeRFCapture/DDSModel/NeRFCaptureData.h
================================================
/****************************************************************
Generated by Eclipse Cyclone DDS IDL to C Translator
File name: NeRFCaptureData.h
Cyclone DDS: V0.11.0
*****************************************************************/
#ifndef DDSC_NERFCAPTUREDATA_H
#define DDSC_NERFCAPTUREDATA_H
#include "dds/ddsc/dds_public_impl.h"
#ifdef __cplusplus
extern "C" {
#endif
#ifndef DDS_SEQUENCE_OCTET_DEFINED
#define DDS_SEQUENCE_OCTET_DEFINED
typedef struct dds_sequence_octet
{
uint32_t _maximum;
uint32_t _length;
uint8_t *_buffer;
bool _release;
} dds_sequence_octet;
#define dds_sequence_octet__alloc() \
((dds_sequence_octet*) dds_alloc (sizeof (dds_sequence_octet)));
#define dds_sequence_octet_allocbuf(l) \
((uint8_t *) dds_alloc ((l) * sizeof (uint8_t)))
#endif /* DDS_SEQUENCE_OCTET_DEFINED */
#ifndef DDS_SEQUENCE_OCTET_DEFINED
#define DDS_SEQUENCE_OCTET_DEFINED
typedef struct dds_sequence_octet
{
uint32_t _maximum;
uint32_t _length;
uint8_t *_buffer;
bool _release;
} dds_sequence_octet;
#define dds_sequence_octet__alloc() \
((dds_sequence_octet*) dds_alloc (sizeof (dds_sequence_octet)));
#define dds_sequence_octet_allocbuf(l) \
((uint8_t *) dds_alloc ((l) * sizeof (uint8_t)))
#endif /* DDS_SEQUENCE_OCTET_DEFINED */
/* C mapping of the IDL struct NeRFCaptureData::NeRFCaptureFrame
 * (see NeRFCaptureData.idl). Generated code -- do not hand-edit.
 * Populated in DDSWriter.writeFrameToTopic() from an ARKit frame. */
typedef struct NeRFCaptureData_NeRFCaptureFrame
{
uint32_t id; /* @key -- monotonically increasing frame counter */
double timestamp; /* ARFrame.timestamp (seconds) */
float fl_x; /* focal length x, from camera intrinsics [0][0] */
float fl_y; /* focal length y, from camera intrinsics [1][1] */
float cx; /* principal point x, from camera intrinsics [2][0] */
float cy; /* principal point y, from camera intrinsics [2][1] */
float transform_matrix[16]; /* flattened 4x4 camera transform (layout per tupleFromTransform -- confirm in Utils.swift) */
uint32_t width; /* RGB image width in pixels */
uint32_t height; /* RGB image height in pixels */
dds_sequence_octet image; /* RGB pixel bytes */
bool has_depth; /* true when the depth fields below are populated */
uint32_t depth_width;
uint32_t depth_height;
float depth_scale; /* multiplier for stored depth values (writer always sends 1.0) */
dds_sequence_octet depth_image; /* raw depth buffer bytes; empty when has_depth is false */
} NeRFCaptureData_NeRFCaptureFrame;
extern const dds_topic_descriptor_t NeRFCaptureData_NeRFCaptureFrame_desc;
#define NeRFCaptureData_NeRFCaptureFrame__alloc() \
((NeRFCaptureData_NeRFCaptureFrame*) dds_alloc (sizeof (NeRFCaptureData_NeRFCaptureFrame)));
#define NeRFCaptureData_NeRFCaptureFrame_free(d,o) \
dds_sample_free ((d), &NeRFCaptureData_NeRFCaptureFrame_desc, (o))
#ifdef __cplusplus
}
#endif
#endif /* DDSC_NERFCAPTUREDATA_H */
================================================
FILE: NeRFCapture/DDSModel/NeRFCaptureData.idl
================================================
// Wire format for a single posed camera frame streamed by NeRFCapture.
// Compiled to C with the Cyclone DDS `idlc` translator; the generated
// NeRFCaptureData.h / NeRFCaptureData.c must be regenerated after edits here.
module NeRFCaptureData
{
@final
struct NeRFCaptureFrame
{
@key
uint32 id;                    // frame counter; doubles as the DDS instance key
double timestamp;             // capture time in seconds (ARFrame.timestamp)
float fl_x;                   // focal length x (pixels)
float fl_y;                   // focal length y (pixels)
float cx;                     // principal point x (pixels)
float cy;                     // principal point y (pixels)
float transform_matrix[16];   // flattened 4x4 camera transform (layout set by the writer)
uint32 width;                 // RGB image width (pixels)
uint32 height;                // RGB image height (pixels)
sequence<octet> image;        // RGB pixel bytes
boolean has_depth;            // true when the depth fields below are populated
uint32 depth_width;
uint32 depth_height;
float depth_scale;            // multiplier for stored depth values (writer sends 1.0)
sequence<octet> depth_image;  // raw depth buffer bytes; empty when has_depth is false
};
};
================================================
FILE: NeRFCapture/DDSWriter.swift
================================================
//
// DDSWriter.swift
// NeRFCapture
//
// Created by Jad Abou-Chakra on 11/1/2023.
//
import Foundation
import ARKit
/// Status bit masks derived from the Cyclone DDS status-id enums.
/// The bridged C API exposes status *ids* (bit positions); these constants
/// are the corresponding masks (1 << id) used when requesting/checking
/// communication statuses.
struct DDSConstants {
static let DDS_PUBLICATION_MATCHED_STATUS: UInt32 = 1 << DDS_PUBLICATION_MATCHED_STATUS_ID.rawValue
static let DDS_INCONSISTENT_TOPIC_STATUS: UInt32 = 1 << DDS_INCONSISTENT_TOPIC_STATUS_ID.rawValue
static let DDS_OFFERED_DEADLINE_MISSED_STATUS: UInt32 = 1 << DDS_OFFERED_DEADLINE_MISSED_STATUS_ID.rawValue
}
/// Bag of Cyclone DDS entity handles created by `DDSWriter.setupDDS()`.
/// `dds_entity_t` handles are plain integers; the C API returns a negative
/// value on creation failure (the caller checks `< 0`).
struct DDSState {
// NOTE(review): never set in the visible code -- confirm intended use.
var ready = false
var domain: dds_entity_t? = nil
var participant: dds_entity_t? = nil
// Opaque listener handle from dds_create_listener().
var listener: OpaquePointer! = nil
// Topic the writer publishes frames on.
let topic_name: String = "Frames"
var topic: dds_entity_t? = nil
var writer: dds_entity_t? = nil
// Opaque QoS handle from dds_create_qos().
var qos: OpaquePointer! = nil
// Return code of the most recent dds_* call (see writeFrameToTopic / cleanDDS).
var rc: dds_return_t = dds_return_t()
var status:UInt32 = 0
}
/// Publishes ARKit camera frames on a Cyclone DDS topic ("Frames") so that a
/// network subscriber can receive posed RGB(-D) images in real time.
class DDSWriter {
    var dds = DDSState()
    let rgbConverter = YUVToRGBFilter()
    // Monotonic frame id; becomes the sample's DDS key field `id`.
    var counter = 0
    // Number of currently matched subscribers, updated on the main queue from
    // the publication-matched listener. NOTE(review): @Published only drives
    // objectWillChange from an ObservableObject -- confirm how the UI observes this.
    @Published var peers: UInt32 = 0

    /// Builds the Cyclone DDS XML configuration: bind networking to the
    /// "en0" interface and trace the effective config to stdout.
    func buildConfig() -> String {
        let xml_config = """
        <General>
        <Interfaces>
        <NetworkInterface name="en0" />
        </Interfaces>
        </General>
        <Tracing>
        <Category>
        config
        </Category>
        <OutputFile>
        stdout
        </OutputFile>
        </Tracing>
        """
        return xml_config
    }

    /// Creates the DDS domain, participant, topic and writer in order.
    /// On any failure it logs and returns, leaving `dds` partially set up.
    func setupDDS() {
        let domain_id: dds_domainid_t = 0
        let xml_config = buildConfig()
        dds.domain = dds_create_domain(domain_id, xml_config)

        // The listener callback is a C function pointer, so `self` travels
        // through the opaque observer pointer. passUnretained is safe only
        // because this writer outlives the listener.
        let observer = UnsafeMutableRawPointer(Unmanaged.passUnretained(self).toOpaque())
        dds.listener = dds_create_listener(observer)
        dds_lset_publication_matched(dds.listener) { entity, status, observer in
            let mySelf = Unmanaged<DDSWriter>.fromOpaque(observer!).takeUnretainedValue()
            // Hop to the main queue: `peers` feeds the UI.
            DispatchQueue.main.async {
                mySelf.peers = status.current_count
            }
        }

        dds.participant = dds_create_participant(domain_id, nil, dds.listener)
        if dds.participant! < 0 {
            print("Could not create participant")
            return
        }

        // The generated topic descriptor is a C global constant; borrow its
        // address only for the duration of the call.
        withUnsafePointer(to: NeRFCaptureData_NeRFCaptureFrame_desc) { descPtr in
            dds.topic = dds_create_topic(dds.participant!, descPtr, dds.topic_name, nil, nil)
        }
        if dds.topic! < 0 {
            print("Could not create topic")
            return
        }

        // Cap queued samples/instances at 2 so large frames are not buffered
        // without bound when the subscriber is slow.
        dds.qos = dds_create_qos()
        dds_qset_resource_limits(dds.qos, 2, 2, 2)
        // Reliability is left at the default (best effort); the reliable
        // variant is intentionally not enabled here.
        dds.writer = dds_create_writer(dds.participant!, dds.topic!, dds.qos!, dds.listener)
        if dds.writer! < 0 {
            print("Could not create writer")
            return
        }
    }

    /// Serializes `frame` (RGB image, intrinsics, pose and optional scene
    /// depth) into a NeRFCaptureFrame sample and writes it to the topic.
    /// The octet sequences alias Metal / CoreVideo memory, so the sample is
    /// only valid during this call (dds_write copies the data out).
    func writeFrameToTopic(frame: ARFrame) {
        let w = UInt32(frame.camera.imageResolution.width)
        let h = UInt32(frame.camera.imageResolution.height)
        let flX = frame.camera.intrinsics[0, 0]
        let flY = frame.camera.intrinsics[1, 1]
        let cx = frame.camera.intrinsics[2, 0]
        let cy = frame.camera.intrinsics[2, 1]

        // GPU-convert the camera's YUV image to RGB; the result lands in
        // rgbConverter.rgbBuffer.
        rgbConverter.applyFilter(frame: frame)
        var data = dds_sequence_octet()
        data._length = UInt32(rgbConverter.rgbBuffer.length)
        data._buffer = rgbConverter.rgbBuffer.contents().bindMemory(to: UInt8.self, capacity: 1)

        var depth_width = 0
        var depth_height = 0
        var has_depth = false
        var depth_data = dds_sequence_octet()
        if let sceneDepth = frame.sceneDepth {
            // Lock for CPU access; the matching unlock happens after dds_write.
            CVPixelBufferLockBaseAddress(sceneDepth.depthMap, .readOnly)
            has_depth = true
            depth_width = CVPixelBufferGetWidth(sceneDepth.depthMap)
            depth_height = CVPixelBufferGetHeight(sceneDepth.depthMap)
            // Note: DataSize may exceed width*height*bytesPerPixel due to
            // row padding -- the receiver must honour this length.
            depth_data._length = UInt32(CVPixelBufferGetDataSize(sceneDepth.depthMap))
            print("\(depth_width)x\(depth_height) - size = \(depth_data._length)")
            // Fixed: use the already-unwrapped `sceneDepth` instead of
            // force-unwrapping frame.sceneDepth a second time.
            depth_data._buffer = CVPixelBufferGetBaseAddress(sceneDepth.depthMap)!.bindMemory(to: UInt8.self, capacity: 1)
        }

        counter += 1
        var msg = NeRFCaptureData_NeRFCaptureFrame(
            id: UInt32(counter),
            timestamp: frame.timestamp,
            fl_x: flX,
            fl_y: flY,
            cx: cx,
            cy: cy,
            transform_matrix: tupleFromTransform(frame.camera.transform),
            width: w,
            height: h,
            image: data,
            has_depth: has_depth,
            depth_width: UInt32(depth_width),
            depth_height: UInt32(depth_height),
            depth_scale: 1.0,
            depth_image: depth_data
        )
        dds.rc = dds_write(dds.writer!, &msg)
        if dds.rc != DDS_RETCODE_OK {
            let message = String(cString: dds_strretcode(dds.rc))
            print("Write Failed: \(message)")
        }
        if let sceneDepth = frame.sceneDepth {
            CVPixelBufferUnlockBaseAddress(sceneDepth.depthMap, .readOnly)
        }
    }

    /// Tears down the DDS graph. Per the Cyclone DDS API, deleting the domain
    /// recursively deletes its children (participant, topic, writer).
    func cleanDDS() {
        if let domain = dds.domain {
            dds.rc = dds_delete(domain)
        }
    }
}
================================================
FILE: NeRFCapture/DatasetWriter.swift
================================================
//
// DatasetWriter.swift
// NeRFCapture
//
// Created by Jad Abou-Chakra on 11/1/2023.
//
import Foundation
import ARKit
import Zip
extension UIImage {
    /// Returns a copy of this image redrawn at `size` (points, opaque = false,
    /// main-screen scale), or `nil` if no image could be produced.
    ///
    /// Fixes: the previous version force-unwrapped
    /// `UIGraphicsGetImageFromCurrentImageContext()` despite declaring an
    /// optional return, and would leak the context if drawing trapped.
    /// `defer` now guarantees `UIGraphicsEndImageContext()` runs on every exit.
    func resizeImageTo(size: CGSize) -> UIImage? {
        UIGraphicsBeginImageContextWithOptions(size, false, 0.0)
        defer { UIGraphicsEndImageContext() }
        self.draw(in: CGRect(origin: .zero, size: size))
        return UIGraphicsGetImageFromCurrentImageContext()
    }
}
/// Writes captured ARKit frames plus a NeRF-style `transforms.json` manifest
/// into a per-session project directory under Documents, optionally zipping
/// the result when the session is finalized.
class DatasetWriter {
    /// Lifecycle of a capture session.
    enum SessionState {
        case SessionNotStarted
        case SessionStarted
    }

    var manifest = Manifest()
    var projectName = ""
    var projectDir = getDocumentsDirectory()
    var useDepthIfAvailable = true

    @Published var currentFrameCounter = 0
    @Published var writerState = SessionState.SessionNotStarted

    /// Returns true when something already exists at `projectDir`.
    func projectExists(_ projectDir: URL) -> Bool {
        var isDir: ObjCBool = true
        // Bug fix: fileExists(atPath:) takes a plain filesystem path.
        // The previous `absoluteString` ("file:///...") never matched, so the
        // duplicate-project check was a no-op.
        return FileManager.default.fileExists(atPath: projectDir.path, isDirectory: &isDir)
    }

    /// Starts a new session: derives a timestamped project name, creates the
    /// images directory, and resets the manifest.
    /// - Throws: `AppError.projectAlreadyExists` if the directory exists.
    func initializeProject() throws {
        let dateFormatter = DateFormatter()
        // Bug fix: "yy" is the calendar year; the previous "YY" (week-based
        // year) is wrong for dates around New Year.
        dateFormatter.dateFormat = "yyMMddHHmmss"
        projectName = dateFormatter.string(from: Date())
        projectDir = getDocumentsDirectory()
            .appendingPathComponent(projectName)
        if projectExists(projectDir) {
            throw AppError.projectAlreadyExists
        }
        do {
            try FileManager.default.createDirectory(at: projectDir.appendingPathComponent("images"), withIntermediateDirectories: true)
        }
        catch {
            // Best effort, as before: log and continue; frame writes will
            // surface their own errors.
            print(error)
        }
        manifest = Manifest()
        // The first frame will set these properly
        manifest.w = 0
        manifest.h = 0
        // These don't matter since every frame will redefine them
        manifest.flX = 1.0
        manifest.flY = 1.0
        manifest.cx = 320
        manifest.cy = 240
        manifest.depthIntegerScale = 1.0
        writerState = .SessionStarted
    }

    /// Discards the current session and deletes its directory in the
    /// background. No-op when no session is active.
    func clean() {
        guard case .SessionStarted = writerState else { return }
        writerState = .SessionNotStarted
        DispatchQueue.global().async {
            do {
                try FileManager.default.removeItem(at: self.projectDir)
            }
            catch {
                print("Could not cleanup project files")
            }
        }
    }

    /// Ends the session: writes `transforms.json`, optionally zips the
    /// project directory, then removes the working copy.
    func finalizeProject(zip: Bool = true) {
        writerState = .SessionNotStarted
        let manifest_path = getDocumentsDirectory()
            .appendingPathComponent(projectName)
            .appendingPathComponent("transforms.json")
        writeManifestToPath(path: manifest_path)
        DispatchQueue.global().async {
            do {
                if zip {
                    let _ = try Zip.quickZipFiles([self.projectDir], fileName: self.projectName)
                }
                try FileManager.default.removeItem(at: self.projectDir)
            }
            catch {
                // Fix: the old "Could not zip" message was misleading when
                // zipping succeeded and only the cleanup failed.
                print("Could not finalize project: \(error)")
            }
        }
    }

    /// Name of the frame about to be written (the running counter).
    func getCurrentFrameName() -> String {
        let frameName = String(currentFrameCounter)
        return frameName
    }

    /// Builds the manifest entry (paths, pose, intrinsics, size) for `frame`.
    func getFrameMetadata(_ frame: ARFrame, withDepth: Bool = false) -> Manifest.Frame {
        let frameName = getCurrentFrameName()
        let filePath = "images/\(frameName)"
        let depthPath = "images/\(frameName).depth.png"
        let manifest_frame = Manifest.Frame(
            filePath: filePath,
            depthPath: withDepth ? depthPath : nil,
            transformMatrix: arrayFromTransform(frame.camera.transform),
            timestamp: frame.timestamp,
            flX: frame.camera.intrinsics[0, 0],
            flY: frame.camera.intrinsics[1, 1],
            cx: frame.camera.intrinsics[2, 0],
            cy: frame.camera.intrinsics[2, 1],
            w: Int(frame.camera.imageResolution.width),
            h: Int(frame.camera.imageResolution.height)
        )
        return manifest_frame
    }

    /// Serializes the manifest as snake_case JSON to `path`; errors are logged.
    func writeManifestToPath(path: URL) {
        let encoder = JSONEncoder()
        encoder.keyEncodingStrategy = .convertToSnakeCase
        encoder.outputFormatting = .withoutEscapingSlashes
        if let encoded = try? encoder.encode(manifest) {
            do {
                try encoded.write(to: path)
            } catch {
                print(error)
            }
        }
    }

    /// Writes one frame's RGB (and optionally depth) PNGs plus its manifest
    /// entry. PNG encoding and disk I/O run on a background queue; the
    /// manifest append hops back to the main queue.
    func writeFrameToDisk(frame: ARFrame, useDepthIfAvailable: Bool = true) {
        let frameName = "\(getCurrentFrameName()).png"
        let depthFrameName = "\(getCurrentFrameName()).depth.png"
        let baseDir = projectDir
            .appendingPathComponent("images")
        let fileName = baseDir
            .appendingPathComponent(frameName)
        let depthFileName = baseDir
            .appendingPathComponent(depthFrameName)
        // The first frame defines the session-wide camera parameters.
        if manifest.w == 0 {
            manifest.w = Int(frame.camera.imageResolution.width)
            manifest.h = Int(frame.camera.imageResolution.height)
            manifest.flX = frame.camera.intrinsics[0, 0]
            manifest.flY = frame.camera.intrinsics[1, 1]
            manifest.cx = frame.camera.intrinsics[2, 0]
            manifest.cy = frame.camera.intrinsics[2, 1]
        }
        let useDepth = frame.sceneDepth != nil && useDepthIfAvailable
        // Capture metadata and image data on this thread; only encoding and
        // writing are deferred to the background queue.
        let frameMetadata = getFrameMetadata(frame, withDepth: useDepth)
        let rgbBuffer = pixelBufferToUIImage(pixelBuffer: frame.capturedImage)
        let depthBuffer = useDepth ? pixelBufferToUIImage(pixelBuffer: frame.sceneDepth!.depthMap).resizeImageTo(size: frame.camera.imageResolution) : nil
        DispatchQueue.global().async {
            do {
                let rgbData = rgbBuffer.pngData()
                try rgbData?.write(to: fileName)
                if useDepth {
                    let depthData = depthBuffer!.pngData()
                    try depthData?.write(to: depthFileName)
                }
            }
            catch {
                print(error)
            }
            DispatchQueue.main.async {
                self.manifest.frames.append(frameMetadata)
            }
        }
        currentFrameCounter += 1
    }
}
================================================
FILE: NeRFCapture/Info.plist
================================================
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>UIFileSharingEnabled</key>
<true/>
<key>UILaunchScreen</key>
<dict>
<key>UIColorName</key>
<string>AccentColor</string>
<key>UIImageName</key>
<string></string>
<key>UIImageRespectsSafeAreaInsets</key>
<false/>
</dict>
</dict>
</plist>
================================================
FILE: NeRFCapture/Models/AppState.swift
================================================
//
// AppState.swift
// NeRFCapture
//
// Created by Jad Abou-Chakra on 13/7/2022.
//
import Foundation
import Metal
import MetalKit
/// Top-level capture mode: Online streams frames over DDS on demand,
/// Offline records a dataset to disk via DatasetWriter.
/// Raw Int values are positional — do not reorder cases (type is Codable).
enum AppMode: Int, Codable {
case Online
case Offline
}
/// Mutable UI-facing state published by ARViewModel and rendered by ContentView.
/// Field order is part of the memberwise initializer — do not reorder.
struct AppState {
// Current capture mode (Online = DDS streaming, Offline = dataset recording).
var appMode: AppMode = .Online
// Mirrors DatasetWriter's published session state.
var writerState: DatasetWriter.SessionState = .SessionNotStarted
// Human-readable ARKit tracking status shown in the overlay.
var trackingState = ""
var projectName = ""
// Mirrors DatasetWriter's published frame counter.
var numFrames = 0
// True when the device supports ARKit scene depth.
var supportsDepth = false
// var stream = false
// Number of connected DDS peers (mirrors DDSWriter's published count).
var ddsPeers: UInt32 = 0
var ddsReady = false
}
/// Persistable user preferences. Codable so it can be serialized;
/// field order is part of the memberwise initializer.
struct AppSettings: Codable {
// When true, the finished dataset folder is zipped on finalize.
var zipDataset = true
// Mode the app starts in on launch.
var startingAppMode = AppMode.Online
}
/// Bag of Metal objects for rendering/converting the captured camera image.
/// Implicitly-unwrapped fields are expected to be assigned during setup
/// before first use — accessing them earlier crashes.
/// NOTE(review): no setup code is visible in this file; confirm ownership.
struct MetalState {
var device: MTLDevice!
var commandQueue: MTLCommandQueue!
var sharedUniformBuffer: MTLBuffer!
var imagePlaneVertexBuffer: MTLBuffer!
var capturedImagePipelineState: MTLRenderPipelineState!
// Y and CbCr planes of the camera image as Metal textures.
var capturedImageTextureY: CVMetalTexture?
var capturedImageTextureCbCr: CVMetalTexture?
var capturedImageTextureCache: CVMetalTextureCache!
}
================================================
FILE: NeRFCapture/Models/Manifest.swift
================================================
//
// Manifest.swift
// NeRFCapture
//
// Created by Jad Abou-Chakra on 13/7/2022.
//
import Foundation
/// In-memory model of the dataset's transforms.json. Encoded with
/// snake_case keys by DatasetWriter's JSONEncoder, so Swift property names
/// map to e.g. `fl_x`, `file_path` on disk. Field set/order is interface —
/// do not reorder (memberwise init) or rename (JSON keys).
struct Manifest : Codable {
/// Per-frame record: image paths, camera pose and per-frame intrinsics.
struct Frame : Codable {
// Path of the RGB image, relative to the project directory.
let filePath: String
// Path of the depth image, or nil when depth was not captured.
let depthPath: String?
// 4x4 camera transform as row-major nested arrays (see arrayFromTransform).
let transformMatrix: [[Float]]
let timestamp: TimeInterval
// Focal lengths and principal point in pixels.
let flX: Float
let flY: Float
let cx: Float
let cy: Float
// Image resolution in pixels.
let w: Int
let h: Int
}
// Global (first-frame) camera parameters; 0 means "not yet initialized".
var w: Int = 0
var h: Int = 0
var flX: Float = 0
var flY: Float = 0
var cx: Float = 0
var cy: Float = 0
// Optional depth metadata; omitted from JSON when nil.
var depthIntegerScale : Float?
var depthSource: String?
var frames: [Frame] = [Frame]()
}
================================================
FILE: NeRFCapture/Networking/CycloneDDS-Bridging-Header.h
================================================
//
// CycloneDDS-Bridging-Header.h
// NeRFCapture
//
// Created by Jad Abou-Chakra on 9/12/2022.
//
#ifndef CycloneDDS_Bridging_Header_h
#define CycloneDDS_Bridging_Header_h
#include <dds/dds.h>
#include "NeRFCaptureData.h"
//const dds_topic_descriptor_t* NeRFCaptureData_NeRFCaptureFrame_desc_ptr = &NeRFCaptureData_NeRFCaptureFrame_desc;
#endif /* CycloneDDS_Bridging_Header_h */
================================================
FILE: NeRFCapture/Preview Content/Preview Assets.xcassets/Contents.json
================================================
{
"info" : {
"version" : 1,
"author" : "xcode"
}
}
================================================
FILE: NeRFCapture/Shaders.metal
================================================
//
// Shaders.metal
// NeRFCapture
//
// Created by Jad Abou-Chakra on 19/12/2022.
//
#include <metal_stdlib>
using namespace metal;
// Converts the captured YCbCr camera image (Y plane + interleaved CbCr plane)
// to packed RGB888. One thread per output pixel; `result` must hold at least
// width*height*3 bytes.
kernel void yuv2rgb_kernel(texture2d<float, access::sample> capturedImageTextureY [[ texture(0) ]],
                           texture2d<float, access::sample> capturedImageTextureCbCr [[ texture(1) ]],
                           device uint8_t* result [[buffer(0)]],
                           uint2 position [[thread_position_in_grid]]) {
    const auto textureSize = ushort2(capturedImageTextureY.get_width(),
                                     capturedImageTextureY.get_height());
    // Guard threads dispatched past the texture edge; without this, any
    // over-dispatch (grid rounded up to threadgroup size) would write
    // outside the bounds of `result`.
    if (position.x >= textureSize.x || position.y >= textureSize.y) {
        return;
    }
    constexpr sampler colorSampler(mip_filter::linear,
                                   mag_filter::linear,
                                   min_filter::linear);
    // YCbCr -> RGB (ITU-R BT.601-style constants), written as a 4x4 so the
    // constant offsets ride in the matrix's last column.
    const float4x4 ycbcrToRGBTransform = float4x4(
        float4(+1.0000f, +1.0000f, +1.0000f, +0.0000f),
        float4(+0.0000f, -0.3441f, +1.7720f, +0.0000f),
        float4(+1.4020f, -0.7141f, +0.0000f, +0.0000f),
        float4(-0.7010f, +0.5291f, -0.8860f, +1.0000f)
    );
    float2 texCoord = float2(position) / float2(textureSize);
    // Sample Y and CbCr textures to get the YCbCr color at this coordinate.
    float4 ycbcr = float4(capturedImageTextureY.sample(colorSampler, texCoord).r,
                          capturedImageTextureCbCr.sample(colorSampler, texCoord).rg, 1.0);
    // Clamp before the uint8 store: out-of-gamut results would otherwise
    // wrap on conversion instead of saturating to 0/255.
    float4 res = clamp(ycbcrToRGBTransform * ycbcr * 255.0, 0.0f, 255.0f);
    int ind = position.y * textureSize[0] + position.x;
    result[3 * ind + 0] = res[0];
    result[3 * ind + 1] = res[1];
    result[3 * ind + 2] = res[2];
}
================================================
FILE: NeRFCapture/Utils.swift
================================================
//
// Utils.swift
// NeRFCapture
//
// Created by Jad Abou-Chakra on 13/7/2022.
//
import Foundation
import ARKit
/// Human-readable label for an ARKit tracking state, shown in the UI overlay.
/// Unrecognized limited-tracking reasons (and future cases) map to "Unknown".
func trackingStateToString(_ trackingState: ARCamera.TrackingState) -> String {
    switch trackingState {
    case .limited(.initializing):
        return "Tracking Initializing"
    case .limited(.excessiveMotion):
        return "Excessive Motion"
    case .limited(.insufficientFeatures):
        return "Insufficient Features"
    case .normal:
        return "Tracking Normal"
    case .notAvailable:
        return "Not Available"
    default:
        return "Unknown"
    }
}
/// Flattens a 4x4 simd transform into a 16-element tuple, column by column
/// (column-major order, matching simd storage).
func tupleFromTransform(_ t: matrix_float4x4) -> (Float, Float, Float, Float, Float, Float, Float, Float, Float, Float, Float, Float, Float, Float, Float, Float) {
    let c0 = t.columns.0
    let c1 = t.columns.1
    let c2 = t.columns.2
    let c3 = t.columns.3
    return (c0.x, c0.y, c0.z, c0.w,
            c1.x, c1.y, c1.z, c1.w,
            c2.x, c2.y, c2.z, c2.w,
            c3.x, c3.y, c3.z, c3.w)
}
/// Converts a column-major simd 4x4 matrix into a row-major nested
/// [[Float]] array (rows gathered across the four columns).
func arrayFromTransform(_ transform: matrix_float4x4) -> [[Float]] {
    let c = transform.columns
    return [
        [c.0.x, c.1.x, c.2.x, c.3.x],
        [c.0.y, c.1.y, c.2.y, c.3.y],
        [c.0.z, c.1.z, c.2.z, c.3.z],
        [c.0.w, c.1.w, c.2.w, c.3.w],
    ]
}
/// Converts a column-major simd 3x3 matrix into a row-major nested
/// [[Float]] array (rows gathered across the three columns).
func arrayFromTransform(_ transform: matrix_float3x3) -> [[Float]] {
    let c = transform.columns
    return [
        [c.0.x, c.1.x, c.2.x],
        [c.0.y, c.1.y, c.2.y],
        [c.0.z, c.1.z, c.2.z],
    ]
}
/// Renders a CVPixelBuffer to a UIImage via Core Image.
/// NOTE(review): force-unwraps the CGImage — `createCGImage` can return nil
/// for unsupported formats; callers rely on the non-optional return, so a
/// failure here crashes. Also creates a fresh CIContext per call — consider
/// caching one if this becomes hot.
func pixelBufferToUIImage(pixelBuffer: CVPixelBuffer) -> UIImage {
let ciImage = CIImage(cvPixelBuffer: pixelBuffer)
let context = CIContext(options: nil)
let cgImage = context.createCGImage(ciImage, from: ciImage.extent)
let uiImage = UIImage(cgImage: cgImage!)
return uiImage
}
/// The app's Documents directory. Datasets written here are user-visible
/// via file sharing (Info.plist sets UIFileSharingEnabled).
func getDocumentsDirectory() -> URL {
    return FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)[0]
}
/// Runs the `yuv2rgb_kernel` compute shader to convert an ARFrame's YCbCr
/// camera image into a packed RGB888 Metal buffer (`rgbBuffer`).
/// Usage: call `applyFilter(frame:)`; the result is then in `rgbBuffer`
/// (width*height*3 bytes, shared storage).
class YUVToRGBFilter {
var device: MTLDevice
var defaultLib: MTLLibrary?
var shader: MTLFunction?
var commandQueue: MTLCommandQueue?
var commandEncoder: MTLComputeCommandEncoder?
var pipelineState: MTLComputePipelineState?
// Dimensions of the most recently processed frame (0 until first frame).
var width: UInt32 = 0
var height: UInt32 = 0
// Fixed 16x16 threadgroup size used for dispatch.
let threadsPerBlock = MTLSize(width: 16, height: 16, depth: 1)
var capturedImagePipelineState: MTLRenderPipelineState!
// Y and CbCr planes of the current camera image as Metal textures.
var capturedImageTextureY: CVMetalTexture?
var capturedImageTextureCbCr: CVMetalTexture?
var capturedImageTextureCache: CVMetalTextureCache!
// Output: packed RGB888, width*height*3 bytes, shared storage.
var rgbBuffer: MTLBuffer!
// Sets up the Metal device, library, kernel, queue and texture cache.
// Crashes (fatalError) if the compute pipeline cannot be built.
init() {
self.device = MTLCreateSystemDefaultDevice()!
self.defaultLib = self.device.makeDefaultLibrary()
self.shader = self.defaultLib?.makeFunction(name: "yuv2rgb_kernel")
self.commandQueue = self.device.makeCommandQueue()
// Create captured image texture cache
var textureCache: CVMetalTextureCache?
CVMetalTextureCacheCreate(nil, nil, self.device, nil, &textureCache)
self.capturedImageTextureCache = textureCache
if let shader = self.shader {
do {
try self.pipelineState = self.device.makeComputePipelineState(function: shader)
} catch {
fatalError("unable to make compute pipeline")
}
}
else {
fatalError("unable to make compute pipeline")
}
}
// Number of threadgroups to dispatch for the current width/height.
// NOTE(review): integer division truncates, so when width/height are not
// multiples of 16 the right/bottom edge pixels are never dispatched and
// stay unconverted in rgbBuffer. A ceil-division fix would need a matching
// bounds guard in the kernel before it is safe.
func getBlockDimensions() -> MTLSize {
let blockWidth = Int(width) / self.threadsPerBlock.width
let blockHeight = Int(height) / self.threadsPerBlock.height
return MTLSizeMake(blockWidth, blockHeight, 1)
}
// Wraps one plane of a pixel buffer as a Metal texture via the cache;
// returns nil on failure.
func createTexture(fromPixelBuffer pixelBuffer: CVPixelBuffer, pixelFormat: MTLPixelFormat, planeIndex: Int) -> CVMetalTexture? {
let width = CVPixelBufferGetWidthOfPlane(pixelBuffer, planeIndex)
let height = CVPixelBufferGetHeightOfPlane(pixelBuffer, planeIndex)
var texture: CVMetalTexture? = nil
let status = CVMetalTextureCacheCreateTextureFromImage(nil, capturedImageTextureCache, pixelBuffer, nil, pixelFormat, width, height, planeIndex, &texture)
if status != kCVReturnSuccess {
texture = nil
}
return texture
}
func updateCapturedImageTextures(frame: ARFrame) {
// Create two textures (Y and CbCr) from the provided frame's captured image
let pixelBuffer = frame.capturedImage
if (CVPixelBufferGetPlaneCount(pixelBuffer) < 2) {
return
}
capturedImageTextureY = createTexture(fromPixelBuffer: pixelBuffer, pixelFormat:.r8Unorm, planeIndex:0)
capturedImageTextureCbCr = createTexture(fromPixelBuffer: pixelBuffer, pixelFormat:.rg8Unorm, planeIndex:1)
let w = Int(frame.camera.imageResolution.width)
let h = Int(frame.camera.imageResolution.height)
// Reallocate the output buffer only when the resolution changes.
if(w != self.width || h != self.height) {
rgbBuffer = device.makeBuffer(length: w*h*3, options: .storageModeShared)
}
width = UInt32(w)
height = UInt32(h)
}
// Converts `frame`'s camera image into rgbBuffer. Blocks the calling
// thread until the GPU work completes (waitUntilCompleted).
// NOTE(review): force-unwraps the Y/CbCr textures — if texture creation
// failed in updateCapturedImageTextures this crashes.
func applyFilter(frame:ARFrame) {
updateCapturedImageTextures(frame: frame)
guard let buffer = self.commandQueue?.makeCommandBuffer(), let encoder = buffer.makeComputeCommandEncoder() else {
return;
}
encoder.setComputePipelineState(self.pipelineState!)
encoder.setTextures([CVMetalTextureGetTexture(capturedImageTextureY!), CVMetalTextureGetTexture(capturedImageTextureCbCr!)], range: 0..<2)
encoder.setBuffer(rgbBuffer, offset: 0, index: 0)
encoder.dispatchThreadgroups(self.getBlockDimensions(), threadsPerThreadgroup: threadsPerBlock)
encoder.endEncoding()
buffer.commit()
buffer.waitUntilCompleted()
}
}
================================================
FILE: NeRFCapture/ViewModels/ARViewModel.swift
================================================
//
// ARViewModel.swift
// NeRFCapture
//
// Created by Jad Abou-Chakra on 13/7/2022.
//
import Foundation
import Zip
import Combine
import ARKit
import RealityKit
/// Errors surfaced by dataset project setup (thrown by DatasetWriter and
/// caught in ContentView's Start button handler).
enum AppError : Error {
case projectAlreadyExists
case manifestInitializationFailed
}
/// Central view model: owns the AR session references, mirrors writer/DDS
/// state into `appState` for SwiftUI, and acts as the ARSessionDelegate.
class ARViewModel : NSObject, ARSessionDelegate, ObservableObject {
    @Published var appState = AppState()
    var session: ARSession? = nil
    var arView: ARView? = nil
    // let frameSubject = PassthroughSubject<ARFrame, Never>()
    var cancellables = Set<AnyCancellable>()
    let datasetWriter: DatasetWriter
    let ddsWriter: DDSWriter

    init(datasetWriter: DatasetWriter, ddsWriter: DDSWriter) {
        self.datasetWriter = datasetWriter
        self.ddsWriter = ddsWriter
        super.init()
        self.setupObservers()
        self.ddsWriter.setupDDS()
    }

    /// Subscribes to the writers' published state so UI-facing `appState`
    /// stays in sync, and logs app-mode changes.
    func setupObservers() {
        // [weak self] in each sink: the AnyCancellables are stored in
        // self.cancellables, so a strong capture of self would create a
        // retain cycle (self -> cancellables -> closure -> self) and the
        // view model would never deallocate.
        datasetWriter.$writerState
            .sink { [weak self] x in self?.appState.writerState = x }
            .store(in: &cancellables)
        datasetWriter.$currentFrameCounter
            .sink { [weak self] x in self?.appState.numFrames = x }
            .store(in: &cancellables)
        ddsWriter.$peers
            .sink { [weak self] x in self?.appState.ddsPeers = UInt32(x) }
            .store(in: &cancellables)
        // This pipeline only reads its input value, so no self capture.
        $appState
            .map(\.appMode)
            .prepend(appState.appMode)
            .removeDuplicates()
            .sink { x in
                switch x {
                case .Offline:
                    // self.appState.stream = false
                    print("Changed to offline")
                case .Online:
                    print("Changed to online")
                }
            }
            .store(in: &cancellables)
        // frameSubject.throttle(for: 0.5, scheduler: RunLoop.main, latest: true).sink {
        //     f in
        //     if self.appState.stream && self.appState.appMode == .Online {
        //         self.ddsWriter.writeFrameToTopic(frame: f)
        //     }
        // }.store(in: &cancellables)
    }

    /// World-tracking configuration with gravity alignment, enabling
    /// scene depth when the device supports it.
    func createARConfiguration() -> ARWorldTrackingConfiguration {
        let configuration = ARWorldTrackingConfiguration()
        configuration.worldAlignment = .gravity
        if type(of: configuration).supportsFrameSemantics(.sceneDepth) {
            // Activate sceneDepth
            configuration.frameSemantics = .sceneDepth
        }
        return configuration
    }

    /// Restarts tracking from scratch, placing the world origin at the
    /// device's current pose.
    func resetWorldOrigin() {
        session?.pause()
        let config = createARConfiguration()
        session?.run(config, options: [.resetTracking])
    }

    func session(
        _ session: ARSession,
        didUpdate frame: ARFrame
    ) {
        // frameSubject.send(frame)
    }

    func session(_ session: ARSession, cameraDidChangeTrackingState camera: ARCamera) {
        self.appState.trackingState = trackingStateToString(camera.trackingState)
    }
}
================================================
FILE: NeRFCapture/Views/ARViewContainer.swift
================================================
//
// ARView.swift
// NeRFCapture
//
// Created by Jad Abou-Chakra on 13/7/2022.
//
import SwiftUI
import RealityKit
import ARKit
/// SwiftUI wrapper that hosts the RealityKit ARView, starts the AR session
/// (on device only), and wires the session back to the view model.
struct ARViewContainer: UIViewRepresentable {
    @ObservedObject var viewModel: ARViewModel

    init(_ vm: ARViewModel) {
        viewModel = vm
    }

    func makeUIView(context: Context) -> ARView {
        let view = ARView(frame: .zero)
        let config = viewModel.createARConfiguration()
        config.worldAlignment = .gravity
        config.isAutoFocusEnabled = true
        // configuration.videoFormat = ARWorldTrackingConfiguration.supportedVideoFormats[4] // 1280x720
        if ARWorldTrackingConfiguration.supportsFrameSemantics(.sceneDepth) {
            viewModel.appState.supportsDepth = true
        }
        view.debugOptions = [.showWorldOrigin]
        // The simulator has no camera; running the session there would fail.
        #if !targetEnvironment(simulator)
        view.session.run(config)
        #endif
        view.session.delegate = viewModel
        viewModel.session = view.session
        viewModel.arView = view
        return view
    }

    func updateUIView(_ uiView: ARView, context: Context) {}
}
================================================
FILE: NeRFCapture/Views/ContentView.swift
================================================
//
// ContentView.swift
// NeRFCapture
//
// Created by Jad Abou-Chakra on 13/7/2022.
//
import SwiftUI
import ARKit
import RealityKit
/// Main screen: AR preview with a status overlay on top and mode-dependent
/// action buttons along the bottom (Online: Reset/Send over DDS;
/// Offline: Reset/Start, then End/Save Frame once a session is recording).
struct ContentView : View {
@StateObject private var viewModel: ARViewModel
// Drives the (currently disabled) settings sheet below.
@State private var showSheet: Bool = false
init(viewModel vm: ARViewModel) {
_viewModel = StateObject(wrappedValue: vm)
}
var body: some View {
ZStack{
// AR preview with the status overlay pinned to the top-trailing corner.
ZStack(alignment: .topTrailing) {
ARViewContainer(viewModel).edgesIgnoringSafeArea(.all)
VStack() {
ZStack() {
// Settings button, kept disabled for now.
HStack() {
//                        Button() {
//                            showSheet.toggle()
//                        } label: {
//                            Image(systemName: "gearshape.fill")
//                                .imageScale(.large)
//                        }
//                        .padding(.leading, 16)
//                        .buttonStyle(.borderless)
//                        .sheet(isPresented: $showSheet) {
//                            VStack() {
//                                Text("Settings")
//                                Spacer()
//                            }
//                            .presentationDetents([.medium])
//                        }
//                        Spacer()
}
// Online/Offline mode picker; locked once a recording session starts.
HStack() {
Spacer()
Picker("Mode", selection: $viewModel.appState.appMode) {
Text("Online").tag(AppMode.Online)
Text("Offline").tag(AppMode.Offline)
}
.frame(maxWidth: 200)
.padding(0)
.pickerStyle(.segmented)
.disabled(viewModel.appState.writerState
!= .SessionNotStarted)
Spacer()
}
}.padding(8)
// Status readout: tracking state, plus peers (Online) or frame
// count (Offline recording) and depth support.
HStack() {
Spacer()
VStack(alignment:.leading) {
Text("\(viewModel.appState.trackingState)")
if case .Online = viewModel.appState.appMode {
Text("\(viewModel.appState.ddsPeers) Connection(s)")
}
if case .Offline = viewModel.appState.appMode {
if case .SessionStarted = viewModel.appState.writerState {
Text("\(viewModel.datasetWriter.currentFrameCounter) Frames")
}
}
if viewModel.appState.supportsDepth {
Text("Depth Supported")
}
}.padding()
}
}
}
// Bottom action bar; contents depend on mode and writer state.
VStack {
Spacer()
HStack(spacing: 20) {
// Online mode: reset origin or send the current frame over DDS.
if case .Online = viewModel.appState.appMode {
Spacer()
Button(action: {
viewModel.resetWorldOrigin()
}) {
Text("Reset")
.padding(.horizontal, 20)
.padding(.vertical, 5)
}
.buttonStyle(.bordered)
.buttonBorderShape(.capsule)
Button(action: {
if let frame = viewModel.session?.currentFrame {
viewModel.ddsWriter.writeFrameToTopic(frame: frame)
}
}) {
Text("Send")
.padding(.horizontal, 20)
.padding(.vertical, 5)
}
.buttonStyle(.borderedProminent)
.buttonBorderShape(.capsule)
}
if case .Offline = viewModel.appState.appMode {
// Before recording: reset origin or start a new dataset project.
if viewModel.appState.writerState == .SessionNotStarted {
Spacer()
Button(action: {
viewModel.resetWorldOrigin()
}) {
Text("Reset")
.padding(.horizontal, 20)
.padding(.vertical, 5)
}
.buttonStyle(.bordered)
.buttonBorderShape(.capsule)
Button(action: {
do {
try viewModel.datasetWriter.initializeProject()
}
catch {
// e.g. AppError.projectAlreadyExists; surfaced via console only.
print("\(error)")
}
}) {
Text("Start")
.padding(.horizontal, 20)
.padding(.vertical, 5)
}
.buttonStyle(.borderedProminent)
.buttonBorderShape(.capsule)
}
// While recording: finalize the dataset or capture one frame.
if viewModel.appState.writerState == .SessionStarted {
Spacer()
Button(action: {
viewModel.datasetWriter.finalizeProject()
}) {
Text("End")
.padding(.horizontal, 20)
.padding(.vertical, 5)
}
.buttonStyle(.bordered)
.buttonBorderShape(.capsule)
Button(action: {
if let frame = viewModel.session?.currentFrame {
viewModel.datasetWriter.writeFrameToDisk(frame: frame)
}
}) {
Text("Save Frame")
.padding(.horizontal, 20)
.padding(.vertical, 5)
}
.buttonStyle(.borderedProminent)
.buttonBorderShape(.capsule)
}
}
}
.padding()
}
.preferredColorScheme(.dark)
}
}
}
#if DEBUG
/// Xcode canvas preview with freshly constructed writers; the AR session
/// itself does not run in preview/simulator (see ARViewContainer).
struct ContentView_Previews : PreviewProvider {
static var previews: some View {
ContentView(viewModel: ARViewModel(datasetWriter: DatasetWriter(), ddsWriter: DDSWriter()))
.previewInterfaceOrientation(.portrait)
}
}
#endif
================================================
FILE: NeRFCapture.xcodeproj/project.pbxproj
================================================
// !$*UTF8*$!
{
archiveVersion = 1;
classes = {
};
objectVersion = 55;
objects = {
/* Begin PBXBuildFile section */
050A8EFA296E738300C5C638 /* DatasetWriter.swift in Sources */ = {isa = PBXBuildFile; fileRef = 050A8EF9296E738200C5C638 /* DatasetWriter.swift */; };
050A8EFC296E8B8E00C5C638 /* DDSWriter.swift in Sources */ = {isa = PBXBuildFile; fileRef = 050A8EFB296E8B8E00C5C638 /* DDSWriter.swift */; };
0512301F29499F50003915D9 /* NeRFCaptureData.c in Sources */ = {isa = PBXBuildFile; fileRef = 0512301C29499F4F003915D9 /* NeRFCaptureData.c */; };
0512302029499F50003915D9 /* NeRFCaptureData.idl in Resources */ = {isa = PBXBuildFile; fileRef = 0512301D29499F4F003915D9 /* NeRFCaptureData.idl */; };
0539EAB12965458E00C51AB9 /* Zip in Frameworks */ = {isa = PBXBuildFile; productRef = 0539EAB02965458E00C51AB9 /* Zip */; };
0539EAB429654B0400C51AB9 /* cyclonedds in Frameworks */ = {isa = PBXBuildFile; productRef = 0539EAB329654B0400C51AB9 /* cyclonedds */; };
057A7AA82950092E000692A0 /* Shaders.metal in Sources */ = {isa = PBXBuildFile; fileRef = 057A7AA72950092E000692A0 /* Shaders.metal */; };
C0C52A90287E7F940011820F /* AppDelegate.swift in Sources */ = {isa = PBXBuildFile; fileRef = C0C52A8F287E7F940011820F /* AppDelegate.swift */; };
C0C52A92287E7F940011820F /* ContentView.swift in Sources */ = {isa = PBXBuildFile; fileRef = C0C52A91287E7F940011820F /* ContentView.swift */; };
C0C52A96287E7F960011820F /* Assets.xcassets in Resources */ = {isa = PBXBuildFile; fileRef = C0C52A95287E7F960011820F /* Assets.xcassets */; };
C0C52A99287E7F960011820F /* Preview Assets.xcassets in Resources */ = {isa = PBXBuildFile; fileRef = C0C52A98287E7F960011820F /* Preview Assets.xcassets */; };
C0C52AA3287EC19A0011820F /* Utils.swift in Sources */ = {isa = PBXBuildFile; fileRef = C0C52AA2287EC19A0011820F /* Utils.swift */; };
C0C52AA8287EC2390011820F /* ARViewModel.swift in Sources */ = {isa = PBXBuildFile; fileRef = C0C52AA7287EC2390011820F /* ARViewModel.swift */; };
C0C52AAA287EC2470011820F /* ARViewContainer.swift in Sources */ = {isa = PBXBuildFile; fileRef = C0C52AA9287EC2470011820F /* ARViewContainer.swift */; };
C0C52AAC287EC26A0011820F /* Manifest.swift in Sources */ = {isa = PBXBuildFile; fileRef = C0C52AAB287EC26A0011820F /* Manifest.swift */; };
C0C52AAE287EC33D0011820F /* AppState.swift in Sources */ = {isa = PBXBuildFile; fileRef = C0C52AAD287EC33D0011820F /* AppState.swift */; };
/* End PBXBuildFile section */
/* Begin PBXCopyFilesBuildPhase section */
0531C0AA293735DE005F0929 /* Embed Frameworks */ = {
isa = PBXCopyFilesBuildPhase;
buildActionMask = 2147483647;
dstPath = "";
dstSubfolderSpec = 10;
files = (
);
name = "Embed Frameworks";
runOnlyForDeploymentPostprocessing = 0;
};
/* End PBXCopyFilesBuildPhase section */
/* Begin PBXFileReference section */
050A8EF9296E738200C5C638 /* DatasetWriter.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = DatasetWriter.swift; sourceTree = "<group>"; };
050A8EFB296E8B8E00C5C638 /* DDSWriter.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = DDSWriter.swift; sourceTree = "<group>"; };
051230122942EE07003915D9 /* CycloneDDS-Bridging-Header.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = "CycloneDDS-Bridging-Header.h"; sourceTree = "<group>"; };
0512301C29499F4F003915D9 /* NeRFCaptureData.c */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.c; path = NeRFCaptureData.c; sourceTree = "<group>"; };
0512301D29499F4F003915D9 /* NeRFCaptureData.idl */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = text; path = NeRFCaptureData.idl; sourceTree = "<group>"; };
0512301E29499F50003915D9 /* NeRFCaptureData.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = NeRFCaptureData.h; sourceTree = "<group>"; };
052D2374287FD53A0083DC5B /* Info.plist */ = {isa = PBXFileReference; lastKnownFileType = text.plist; path = Info.plist; sourceTree = "<group>"; };
057A7AA72950092E000692A0 /* Shaders.metal */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.metal; path = Shaders.metal; sourceTree = "<group>"; };
C0C52A8C287E7F940011820F /* NeRFCapture.app */ = {isa = PBXFileReference; explicitFileType = wrapper.application; includeInIndex = 0; path = NeRFCapture.app; sourceTree = BUILT_PRODUCTS_DIR; };
C0C52A8F287E7F940011820F /* AppDelegate.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = AppDelegate.swift; sourceTree = "<group>"; };
C0C52A91287E7F940011820F /* ContentView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ContentView.swift; sourceTree = "<group>"; };
C0C52A95287E7F960011820F /* Assets.xcassets */ = {isa = PBXFileReference; lastKnownFileType = folder.assetcatalog; path = Assets.xcassets; sourceTree = "<group>"; };
C0C52A98287E7F960011820F /* Preview Assets.xcassets */ = {isa = PBXFileReference; lastKnownFileType = folder.assetcatalog; path = "Preview Assets.xcassets"; sourceTree = "<group>"; };
C0C52AA2287EC19A0011820F /* Utils.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = Utils.swift; sourceTree = "<group>"; };
C0C52AA7287EC2390011820F /* ARViewModel.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ARViewModel.swift; sourceTree = "<group>"; };
C0C52AA9287EC2470011820F /* ARViewContainer.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ARViewContainer.swift; sourceTree = "<group>"; };
C0C52AAB287EC26A0011820F /* Manifest.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = Manifest.swift; sourceTree = "<group>"; };
C0C52AAD287EC33D0011820F /* AppState.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = AppState.swift; sourceTree = "<group>"; };
/* End PBXFileReference section */
/* Begin PBXFrameworksBuildPhase section */
C0C52A89287E7F940011820F /* Frameworks */ = {
isa = PBXFrameworksBuildPhase;
buildActionMask = 2147483647;
files = (
0539EAB429654B0400C51AB9 /* cyclonedds in Frameworks */,
0539EAB12965458E00C51AB9 /* Zip in Frameworks */,
);
runOnlyForDeploymentPostprocessing = 0;
};
/* End PBXFrameworksBuildPhase section */
/* Begin PBXGroup section */
0531C0A6293735DD005F0929 /* Frameworks */ = {
isa = PBXGroup;
children = (
);
name = Frameworks;
sourceTree = "<group>";
};
057E7765293816800014E461 /* Networking */ = {
isa = PBXGroup;
children = (
051230122942EE07003915D9 /* CycloneDDS-Bridging-Header.h */,
);
path = Networking;
sourceTree = "<group>";
};
057E776C293825FE0014E461 /* Packages */ = {
isa = PBXGroup;
children = (
);
name = Packages;
sourceTree = "<group>";
};
057E7774293827F60014E461 /* DDSModel */ = {
isa = PBXGroup;
children = (
0512301C29499F4F003915D9 /* NeRFCaptureData.c */,
0512301E29499F50003915D9 /* NeRFCaptureData.h */,
0512301D29499F4F003915D9 /* NeRFCaptureData.idl */,
);
path = DDSModel;
sourceTree = "<group>";
};
C0C52A83287E7F940011820F = {
isa = PBXGroup;
children = (
057E776C293825FE0014E461 /* Packages */,
C0C52A8E287E7F940011820F /* NeRFCapture */,
C0C52A8D287E7F940011820F /* Products */,
0531C0A6293735DD005F0929 /* Frameworks */,
);
sourceTree = "<group>";
};
C0C52A8D287E7F940011820F /* Products */ = {
isa = PBXGroup;
children = (
C0C52A8C287E7F940011820F /* NeRFCapture.app */,
);
name = Products;
sourceTree = "<group>";
};
C0C52A8E287E7F940011820F /* NeRFCapture */ = {
isa = PBXGroup;
children = (
057E7774293827F60014E461 /* DDSModel */,
057E7765293816800014E461 /* Networking */,
052D2374287FD53A0083DC5B /* Info.plist */,
C0C52AA6287EC2090011820F /* Models */,
C0C52AA5287EC2040011820F /* Views */,
C0C52AA4287EC1FA0011820F /* ViewModels */,
C0C52A8F287E7F940011820F /* AppDelegate.swift */,
C0C52AA2287EC19A0011820F /* Utils.swift */,
C0C52A95287E7F960011820F /* Assets.xcassets */,
C0C52A97287E7F960011820F /* Preview Content */,
057A7AA72950092E000692A0 /* Shaders.metal */,
050A8EF9296E738200C5C638 /* DatasetWriter.swift */,
050A8EFB296E8B8E00C5C638 /* DDSWriter.swift */,
);
path = NeRFCapture;
sourceTree = "<group>";
};
C0C52A97287E7F960011820F /* Preview Content */ = {
isa = PBXGroup;
children = (
C0C52A98287E7F960011820F /* Preview Assets.xcassets */,
);
path = "Preview Content";
sourceTree = "<group>";
};
C0C52AA4287EC1FA0011820F /* ViewModels */ = {
isa = PBXGroup;
children = (
C0C52AA7287EC2390011820F /* ARViewModel.swift */,
);
path = ViewModels;
sourceTree = "<group>";
};
C0C52AA5287EC2040011820F /* Views */ = {
isa = PBXGroup;
children = (
C0C52A91287E7F940011820F /* ContentView.swift */,
C0C52AA9287EC2470011820F /* ARViewContainer.swift */,
);
path = Views;
sourceTree = "<group>";
};
C0C52AA6287EC2090011820F /* Models */ = {
isa = PBXGroup;
children = (
C0C52AAB287EC26A0011820F /* Manifest.swift */,
C0C52AAD287EC33D0011820F /* AppState.swift */,
);
path = Models;
sourceTree = "<group>";
};
/* End PBXGroup section */
/* Begin PBXNativeTarget section */
C0C52A8B287E7F940011820F /* NeRFCapture */ = {
isa = PBXNativeTarget;
buildConfigurationList = C0C52A9C287E7F960011820F /* Build configuration list for PBXNativeTarget "NeRFCapture" */;
buildPhases = (
C0C52A88287E7F940011820F /* Sources */,
C0C52A89287E7F940011820F /* Frameworks */,
C0C52A8A287E7F940011820F /* Resources */,
0531C0AA293735DE005F0929 /* Embed Frameworks */,
);
buildRules = (
);
dependencies = (
);
name = NeRFCapture;
packageProductDependencies = (
0539EAB02965458E00C51AB9 /* Zip */,
0539EAB329654B0400C51AB9 /* cyclonedds */,
);
productName = NeRFCapture;
productReference = C0C52A8C287E7F940011820F /* NeRFCapture.app */;
productType = "com.apple.product-type.application";
};
/* End PBXNativeTarget section */
/* Begin PBXProject section */
C0C52A84287E7F940011820F /* Project object */ = {
isa = PBXProject;
attributes = {
BuildIndependentTargetsInParallel = 1;
LastSwiftUpdateCheck = 1340;
LastUpgradeCheck = 1340;
TargetAttributes = {
C0C52A8B287E7F940011820F = {
CreatedOnToolsVersion = 13.4.1;
};
};
};
buildConfigurationList = C0C52A87287E7F940011820F /* Build configuration list for PBXProject "NeRFCapture" */;
compatibilityVersion = "Xcode 13.0";
developmentRegion = en;
hasScannedForEncodings = 0;
knownRegions = (
en,
Base,
);
mainGroup = C0C52A83287E7F940011820F;
packageReferences = (
0539EAAF2965458E00C51AB9 /* XCRemoteSwiftPackageReference "Zip" */,
0539EAB229654B0400C51AB9 /* XCRemoteSwiftPackageReference "CycloneDDSPrebuild" */,
);
productRefGroup = C0C52A8D287E7F940011820F /* Products */;
projectDirPath = "";
projectRoot = "";
targets = (
C0C52A8B287E7F940011820F /* NeRFCapture */,
);
};
/* End PBXProject section */
/* Begin PBXResourcesBuildPhase section */
C0C52A8A287E7F940011820F /* Resources */ = {
isa = PBXResourcesBuildPhase;
buildActionMask = 2147483647;
files = (
0512302029499F50003915D9 /* NeRFCaptureData.idl in Resources */,
C0C52A99287E7F960011820F /* Preview Assets.xcassets in Resources */,
C0C52A96287E7F960011820F /* Assets.xcassets in Resources */,
);
runOnlyForDeploymentPostprocessing = 0;
};
/* End PBXResourcesBuildPhase section */
/* Begin PBXSourcesBuildPhase section */
C0C52A88287E7F940011820F /* Sources */ = {
isa = PBXSourcesBuildPhase;
buildActionMask = 2147483647;
files = (
C0C52AAA287EC2470011820F /* ARViewContainer.swift in Sources */,
050A8EFA296E738300C5C638 /* DatasetWriter.swift in Sources */,
C0C52A92287E7F940011820F /* ContentView.swift in Sources */,
C0C52AAE287EC33D0011820F /* AppState.swift in Sources */,
0512301F29499F50003915D9 /* NeRFCaptureData.c in Sources */,
C0C52AAC287EC26A0011820F /* Manifest.swift in Sources */,
C0C52A90287E7F940011820F /* AppDelegate.swift in Sources */,
050A8EFC296E8B8E00C5C638 /* DDSWriter.swift in Sources */,
057A7AA82950092E000692A0 /* Shaders.metal in Sources */,
C0C52AA3287EC19A0011820F /* Utils.swift in Sources */,
C0C52AA8287EC2390011820F /* ARViewModel.swift in Sources */,
);
runOnlyForDeploymentPostprocessing = 0;
};
/* End PBXSourcesBuildPhase section */
/* Begin XCBuildConfiguration section */
C0C52A9A287E7F960011820F /* Debug */ = {
isa = XCBuildConfiguration;
buildSettings = {
ALWAYS_SEARCH_USER_PATHS = NO;
CLANG_ANALYZER_NONNULL = YES;
CLANG_ANALYZER_NUMBER_OBJECT_CONVERSION = YES_AGGRESSIVE;
CLANG_CXX_LANGUAGE_STANDARD = "gnu++17";
CLANG_ENABLE_MODULES = YES;
CLANG_ENABLE_OBJC_ARC = YES;
CLANG_ENABLE_OBJC_WEAK = YES;
CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES;
CLANG_WARN_BOOL_CONVERSION = YES;
CLANG_WARN_COMMA = YES;
CLANG_WARN_CONSTANT_CONVERSION = YES;
CLANG_WARN_DEPRECATED_OBJC_IMPLEMENTATIONS = YES;
CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR;
CLANG_WARN_DOCUMENTATION_COMMENTS = YES;
CLANG_WARN_EMPTY_BODY = YES;
CLANG_WARN_ENUM_CONVERSION = YES;
CLANG_WARN_INFINITE_RECURSION = YES;
CLANG_WARN_INT_CONVERSION = YES;
CLANG_WARN_NON_LITERAL_NULL_CONVERSION = YES;
CLANG_WARN_OBJC_IMPLICIT_RETAIN_SELF = YES;
CLANG_WARN_OBJC_LITERAL_CONVERSION = YES;
CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR;
CLANG_WARN_QUOTED_INCLUDE_IN_FRAMEWORK_HEADER = YES;
CLANG_WARN_RANGE_LOOP_ANALYSIS = YES;
CLANG_WARN_STRICT_PROTOTYPES = YES;
CLANG_WARN_SUSPICIOUS_MOVE = YES;
CLANG_WARN_UNGUARDED_AVAILABILITY = YES_AGGRESSIVE;
CLANG_WARN_UNREACHABLE_CODE = YES;
CLANG_WARN__DUPLICATE_METHOD_MATCH = YES;
COPY_PHASE_STRIP = NO;
DEBUG_INFORMATION_FORMAT = dwarf;
ENABLE_STRICT_OBJC_MSGSEND = YES;
ENABLE_TESTABILITY = YES;
GCC_C_LANGUAGE_STANDARD = gnu11;
GCC_DYNAMIC_NO_PIC = NO;
GCC_NO_COMMON_BLOCKS = YES;
GCC_OPTIMIZATION_LEVEL = 0;
GCC_PREPROCESSOR_DEFINITIONS = (
"DEBUG=1",
"$(inherited)",
);
GCC_WARN_64_TO_32_BIT_CONVERSION = YES;
GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR;
GCC_WARN_UNDECLARED_SELECTOR = YES;
GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE;
GCC_WARN_UNUSED_FUNCTION = YES;
GCC_WARN_UNUSED_VARIABLE = YES;
IPHONEOS_DEPLOYMENT_TARGET = 15.0;
MTL_ENABLE_DEBUG_INFO = INCLUDE_SOURCE;
MTL_FAST_MATH = YES;
ONLY_ACTIVE_ARCH = YES;
SDKROOT = iphoneos;
SWIFT_ACTIVE_COMPILATION_CONDITIONS = DEBUG;
SWIFT_ENFORCE_EXCLUSIVE_ACCESS = on;
SWIFT_OPTIMIZATION_LEVEL = "-Onone";
};
name = Debug;
};
C0C52A9B287E7F960011820F /* Release */ = {
isa = XCBuildConfiguration;
buildSettings = {
ALWAYS_SEARCH_USER_PATHS = NO;
CLANG_ANALYZER_NONNULL = YES;
CLANG_ANALYZER_NUMBER_OBJECT_CONVERSION = YES_AGGRESSIVE;
CLANG_CXX_LANGUAGE_STANDARD = "gnu++17";
CLANG_ENABLE_MODULES = YES;
CLANG_ENABLE_OBJC_ARC = YES;
CLANG_ENABLE_OBJC_WEAK = YES;
CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES;
CLANG_WARN_BOOL_CONVERSION = YES;
CLANG_WARN_COMMA = YES;
CLANG_WARN_CONSTANT_CONVERSION = YES;
CLANG_WARN_DEPRECATED_OBJC_IMPLEMENTATIONS = YES;
CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR;
CLANG_WARN_DOCUMENTATION_COMMENTS = YES;
CLANG_WARN_EMPTY_BODY = YES;
CLANG_WARN_ENUM_CONVERSION = YES;
CLANG_WARN_INFINITE_RECURSION = YES;
CLANG_WARN_INT_CONVERSION = YES;
CLANG_WARN_NON_LITERAL_NULL_CONVERSION = YES;
CLANG_WARN_OBJC_IMPLICIT_RETAIN_SELF = YES;
CLANG_WARN_OBJC_LITERAL_CONVERSION = YES;
CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR;
CLANG_WARN_QUOTED_INCLUDE_IN_FRAMEWORK_HEADER = YES;
CLANG_WARN_RANGE_LOOP_ANALYSIS = YES;
CLANG_WARN_STRICT_PROTOTYPES = YES;
CLANG_WARN_SUSPICIOUS_MOVE = YES;
CLANG_WARN_UNGUARDED_AVAILABILITY = YES_AGGRESSIVE;
CLANG_WARN_UNREACHABLE_CODE = YES;
CLANG_WARN__DUPLICATE_METHOD_MATCH = YES;
COPY_PHASE_STRIP = NO;
DEBUG_INFORMATION_FORMAT = "dwarf-with-dsym";
ENABLE_NS_ASSERTIONS = NO;
ENABLE_STRICT_OBJC_MSGSEND = YES;
GCC_C_LANGUAGE_STANDARD = gnu11;
GCC_NO_COMMON_BLOCKS = YES;
GCC_WARN_64_TO_32_BIT_CONVERSION = YES;
GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR;
GCC_WARN_UNDECLARED_SELECTOR = YES;
GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE;
GCC_WARN_UNUSED_FUNCTION = YES;
GCC_WARN_UNUSED_VARIABLE = YES;
IPHONEOS_DEPLOYMENT_TARGET = 15.0;
MTL_ENABLE_DEBUG_INFO = NO;
MTL_FAST_MATH = YES;
SDKROOT = iphoneos;
SWIFT_COMPILATION_MODE = wholemodule;
SWIFT_ENFORCE_EXCLUSIVE_ACCESS = on;
SWIFT_OPTIMIZATION_LEVEL = "-O";
VALIDATE_PRODUCT = YES;
};
name = Release;
};
C0C52A9D287E7F960011820F /* Debug */ = {
isa = XCBuildConfiguration;
buildSettings = {
ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon;
ASSETCATALOG_COMPILER_GLOBAL_ACCENT_COLOR_NAME = AccentColor;
CODE_SIGN_IDENTITY = "Apple Development";
CODE_SIGN_STYLE = Automatic;
CURRENT_PROJECT_VERSION = 1;
DEVELOPMENT_ASSET_PATHS = "\"NeRFCapture/Preview Content\"";
DEVELOPMENT_TEAM = 2DFGSA53H4;
ENABLE_PREVIEWS = YES;
GENERATE_INFOPLIST_FILE = YES;
INFOPLIST_FILE = NeRFCapture/Info.plist;
INFOPLIST_KEY_CFBundleDisplayName = "NeRF Capture";
INFOPLIST_KEY_LSApplicationCategoryType = "public.app-category.education";
INFOPLIST_KEY_LSSupportsOpeningDocumentsInPlace = YES;
				INFOPLIST_KEY_NSCameraUsageDescription = "NeRFCapture uses the camera to capture posed images for NeRF dataset collection.";
INFOPLIST_KEY_UIApplicationSupportsIndirectInputEvents = YES;
INFOPLIST_KEY_UILaunchStoryboardName = LaunchScreen.storyboard;
INFOPLIST_KEY_UIRequiredDeviceCapabilities = arkit;
INFOPLIST_KEY_UIStatusBarHidden = YES;
INFOPLIST_KEY_UISupportedInterfaceOrientations = "UIInterfaceOrientationLandscapeLeft UIInterfaceOrientationLandscapeRight UIInterfaceOrientationPortrait UIInterfaceOrientationPortraitUpsideDown";
INFOPLIST_KEY_UISupportsDocumentBrowser = YES;
IPHONEOS_DEPLOYMENT_TARGET = 15.0;
LD_RUNPATH_SEARCH_PATHS = (
"$(inherited)",
"@executable_path/Frameworks",
);
MARKETING_VERSION = 1.0;
PRODUCT_BUNDLE_IDENTIFIER = au.edu.qcr.NeRFCapture;
PRODUCT_NAME = "$(TARGET_NAME)";
PROVISIONING_PROFILE_SPECIFIER = "";
SUPPORTED_PLATFORMS = "iphoneos iphonesimulator";
SUPPORTS_MACCATALYST = NO;
SUPPORTS_MAC_DESIGNED_FOR_IPHONE_IPAD = NO;
SWIFT_EMIT_LOC_STRINGS = YES;
SWIFT_OBJC_BRIDGING_HEADER = "NeRFCapture/Networking/CycloneDDS-Bridging-Header.h";
SWIFT_VERSION = 5.0;
TARGETED_DEVICE_FAMILY = "1,2";
};
name = Debug;
};
C0C52A9E287E7F960011820F /* Release */ = {
isa = XCBuildConfiguration;
buildSettings = {
ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon;
ASSETCATALOG_COMPILER_GLOBAL_ACCENT_COLOR_NAME = AccentColor;
CODE_SIGN_IDENTITY = "Apple Development";
CODE_SIGN_STYLE = Automatic;
CURRENT_PROJECT_VERSION = 1;
DEVELOPMENT_ASSET_PATHS = "\"NeRFCapture/Preview Content\"";
DEVELOPMENT_TEAM = 2DFGSA53H4;
ENABLE_PREVIEWS = YES;
GENERATE_INFOPLIST_FILE = YES;
INFOPLIST_FILE = NeRFCapture/Info.plist;
INFOPLIST_KEY_CFBundleDisplayName = "NeRF Capture";
INFOPLIST_KEY_LSApplicationCategoryType = "public.app-category.education";
INFOPLIST_KEY_LSSupportsOpeningDocumentsInPlace = YES;
				INFOPLIST_KEY_NSCameraUsageDescription = "NeRFCapture uses the camera to capture posed images for NeRF dataset collection.";
INFOPLIST_KEY_UIApplicationSupportsIndirectInputEvents = YES;
INFOPLIST_KEY_UILaunchStoryboardName = LaunchScreen.storyboard;
INFOPLIST_KEY_UIRequiredDeviceCapabilities = arkit;
INFOPLIST_KEY_UIStatusBarHidden = YES;
INFOPLIST_KEY_UISupportedInterfaceOrientations = "UIInterfaceOrientationLandscapeLeft UIInterfaceOrientationLandscapeRight UIInterfaceOrientationPortrait UIInterfaceOrientationPortraitUpsideDown";
INFOPLIST_KEY_UISupportsDocumentBrowser = YES;
IPHONEOS_DEPLOYMENT_TARGET = 15.0;
LD_RUNPATH_SEARCH_PATHS = (
"$(inherited)",
"@executable_path/Frameworks",
);
MARKETING_VERSION = 1.0;
PRODUCT_BUNDLE_IDENTIFIER = au.edu.qcr.NeRFCapture;
PRODUCT_NAME = "$(TARGET_NAME)";
PROVISIONING_PROFILE_SPECIFIER = "";
SUPPORTED_PLATFORMS = "iphoneos iphonesimulator";
SUPPORTS_MACCATALYST = NO;
SUPPORTS_MAC_DESIGNED_FOR_IPHONE_IPAD = NO;
SWIFT_EMIT_LOC_STRINGS = YES;
SWIFT_OBJC_BRIDGING_HEADER = "NeRFCapture/Networking/CycloneDDS-Bridging-Header.h";
SWIFT_VERSION = 5.0;
TARGETED_DEVICE_FAMILY = "1,2";
};
name = Release;
};
/* End XCBuildConfiguration section */
/* Begin XCConfigurationList section */
C0C52A87287E7F940011820F /* Build configuration list for PBXProject "NeRFCapture" */ = {
isa = XCConfigurationList;
buildConfigurations = (
C0C52A9A287E7F960011820F /* Debug */,
C0C52A9B287E7F960011820F /* Release */,
);
defaultConfigurationIsVisible = 0;
defaultConfigurationName = Release;
};
C0C52A9C287E7F960011820F /* Build configuration list for PBXNativeTarget "NeRFCapture" */ = {
isa = XCConfigurationList;
buildConfigurations = (
C0C52A9D287E7F960011820F /* Debug */,
C0C52A9E287E7F960011820F /* Release */,
);
defaultConfigurationIsVisible = 0;
defaultConfigurationName = Release;
};
/* End XCConfigurationList section */
/* Begin XCRemoteSwiftPackageReference section */
0539EAAF2965458E00C51AB9 /* XCRemoteSwiftPackageReference "Zip" */ = {
isa = XCRemoteSwiftPackageReference;
repositoryURL = "https://github.com/marmelroy/Zip";
requirement = {
branch = master;
kind = branch;
};
};
0539EAB229654B0400C51AB9 /* XCRemoteSwiftPackageReference "CycloneDDSPrebuild" */ = {
isa = XCRemoteSwiftPackageReference;
repositoryURL = "https://github.com/jc211/CycloneDDSPrebuild.git";
requirement = {
branch = main;
kind = branch;
};
};
/* End XCRemoteSwiftPackageReference section */
/* Begin XCSwiftPackageProductDependency section */
0539EAB02965458E00C51AB9 /* Zip */ = {
isa = XCSwiftPackageProductDependency;
package = 0539EAAF2965458E00C51AB9 /* XCRemoteSwiftPackageReference "Zip" */;
productName = Zip;
};
0539EAB329654B0400C51AB9 /* cyclonedds */ = {
isa = XCSwiftPackageProductDependency;
package = 0539EAB229654B0400C51AB9 /* XCRemoteSwiftPackageReference "CycloneDDSPrebuild" */;
productName = cyclonedds;
};
/* End XCSwiftPackageProductDependency section */
};
rootObject = C0C52A84287E7F940011820F /* Project object */;
}
================================================
FILE: NeRFCapture.xcodeproj/project.xcworkspace/contents.xcworkspacedata
================================================
<?xml version="1.0" encoding="UTF-8"?>
<Workspace
version = "1.0">
<FileRef
location = "self:">
</FileRef>
</Workspace>
================================================
FILE: NeRFCapture.xcodeproj/project.xcworkspace/xcshareddata/IDEWorkspaceChecks.plist
================================================
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>IDEDidComputeMac32BitWarning</key>
<true/>
</dict>
</plist>
================================================
FILE: NeRFCapture.xcodeproj/project.xcworkspace/xcshareddata/WorkspaceSettings.xcsettings
================================================
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>PreviewsEnabled</key>
<false/>
</dict>
</plist>
================================================
FILE: NeRFCapture.xcodeproj/project.xcworkspace/xcshareddata/swiftpm/Package.resolved
================================================
{
"pins" : [
{
"identity" : "cycloneddsprebuild",
"kind" : "remoteSourceControl",
"location" : "https://github.com/jc211/CycloneDDSPrebuild.git",
"state" : {
"branch" : "main",
"revision" : "38991d7195f7d64eb9e1ae630bc2efbecada40d2"
}
},
{
"identity" : "zip",
"kind" : "remoteSourceControl",
"location" : "https://github.com/marmelroy/Zip",
"state" : {
"branch" : "master",
"revision" : "67fa55813b9e7b3b9acee9c0ae501def28746d76"
}
}
],
"version" : 2
}
================================================
FILE: NeRFCapture.xcodeproj/xcshareddata/xcschemes/NeRFCapture.xcscheme
================================================
<?xml version="1.0" encoding="UTF-8"?>
<Scheme
LastUpgradeVersion = "1410"
version = "1.3">
<BuildAction
parallelizeBuildables = "YES"
buildImplicitDependencies = "YES">
<BuildActionEntries>
<BuildActionEntry
buildForTesting = "YES"
buildForRunning = "YES"
buildForProfiling = "YES"
buildForArchiving = "YES"
buildForAnalyzing = "YES">
<BuildableReference
BuildableIdentifier = "primary"
BlueprintIdentifier = "C0C52A8B287E7F940011820F"
BuildableName = "NeRFCapture.app"
BlueprintName = "NeRFCapture"
ReferencedContainer = "container:NeRFCapture.xcodeproj">
</BuildableReference>
</BuildActionEntry>
</BuildActionEntries>
</BuildAction>
<TestAction
buildConfiguration = "Debug"
selectedDebuggerIdentifier = "Xcode.DebuggerFoundation.Debugger.LLDB"
selectedLauncherIdentifier = "Xcode.DebuggerFoundation.Launcher.LLDB"
shouldUseLaunchSchemeArgsEnv = "YES">
<Testables>
</Testables>
</TestAction>
<LaunchAction
buildConfiguration = "Debug"
selectedDebuggerIdentifier = "Xcode.DebuggerFoundation.Debugger.LLDB"
selectedLauncherIdentifier = "Xcode.DebuggerFoundation.Launcher.LLDB"
launchStyle = "0"
useCustomWorkingDirectory = "NO"
ignoresPersistentStateOnLaunch = "NO"
debugDocumentVersioning = "YES"
debugServiceExtension = "internal"
allowLocationSimulation = "YES">
<BuildableProductRunnable
runnableDebuggingMode = "0">
<BuildableReference
BuildableIdentifier = "primary"
BlueprintIdentifier = "C0C52A8B287E7F940011820F"
BuildableName = "NeRFCapture.app"
BlueprintName = "NeRFCapture"
ReferencedContainer = "container:NeRFCapture.xcodeproj">
</BuildableReference>
</BuildableProductRunnable>
</LaunchAction>
<ProfileAction
buildConfiguration = "Release"
shouldUseLaunchSchemeArgsEnv = "YES"
savedToolIdentifier = ""
useCustomWorkingDirectory = "NO"
debugDocumentVersioning = "YES">
<BuildableProductRunnable
runnableDebuggingMode = "0">
<BuildableReference
BuildableIdentifier = "primary"
BlueprintIdentifier = "C0C52A8B287E7F940011820F"
BuildableName = "NeRFCapture.app"
BlueprintName = "NeRFCapture"
ReferencedContainer = "container:NeRFCapture.xcodeproj">
</BuildableReference>
</BuildableProductRunnable>
</ProfileAction>
<AnalyzeAction
buildConfiguration = "Debug">
</AnalyzeAction>
<ArchiveAction
buildConfiguration = "Release"
revealArchiveInOrganizer = "YES">
</ArchiveAction>
</Scheme>
================================================
FILE: README.md
================================================
# NeRF Capture
<img src="docs/assets_readme/NeRFCaptureReal.png" height="342"/><img src="docs/assets_readme/NeRFCaptureSample.gif" height="342"/>
Collecting NeRF datasets is difficult. NeRF Capture is an iOS application that allows any iPhone or iPad to quickly collect or stream posed images to [InstantNGP](https://github.com/NVlabs/instant-ngp). If your device has a LiDAR, the depth images will be saved/streamed as well. The app has two modes: Offline and Online. In Offline mode, the dataset is saved to the device and can be accessed in the Files App in the NeRFCapture folder. Online mode uses [CycloneDDS](https://github.com/eclipse-cyclonedds/cyclonedds) to publish the posed images on the network. A Python script then collects the images and provides them to InstantNGP.
<a href="https://apps.apple.com/us/app/nerfcapture/id6446518379?itsct=apps_box_badge&itscg=30200" style="display: inline-block; overflow: hidden; border-radius: 13px; width: 150px; height: 53px;"><img src="https://tools.applemediaservices.com/api/badges/download-on-the-app-store/black/en-us?size=250x83&releaseDate=1679443200" alt="Download on the App Store" style="border-radius: 13px; width: 150px; height: 53px;"></a>
## Online Mode
<img src="docs/assets_readme/NeRFCaptureScreenshot.png" height="342"/>
Use the Reset button to reset the coordinate system to the current position of the camera. This takes a while; wait until tracking has initialized before moving away.
Switch the app to online mode. On the computer running InstantNGP, make sure that CycloneDDS is installed in the same python environment that is running pyngp. OpenCV and Pillow are needed to save and resize images.
```
pip install cyclonedds
```
Check that the computer can see the device on your network by running in your terminal:
```
cyclonedds ps
```
Instructions found in [here](https://github.com/NVlabs/instant-ngp/blob/master/docs/nerf_dataset_tips.md#NeRFCapture)
## Offline Mode
In Offline mode, clicking Start initializes the dataset. Take a few images, then click End when you're done. The dataset can be found as a zip file in your Files App, in the format that InstantNGP expects. Unzip the dataset and drag and drop it into InstantNGP. We have found it fairly difficult to transfer files from an iOS device to another computer, so we recommend running the app in Online mode and collecting the dataset with the nerfcapture2nerf.py script found in InstantNGP.
<img src="docs/assets_readme/NeRFCaptureFile1.png" height="342"/>
<img src="docs/assets_readme/NeRFCaptureFile2.png" height="342"/>
## Citation
If you use this software in your research, please consider citing it.
```bibtex
@misc{
NeRFCapture,
url={https://github.com/jc211/NeRFCapture},
journal={NeRFCapture},
author={Abou-Chakra, Jad},
year={2023},
month={Mar}
}
```
gitextract_wykh3jz3/ ├── .gitignore ├── CITATION.cff ├── LICENCE ├── NeRFCapture/ │ ├── AppDelegate.swift │ ├── Assets.xcassets/ │ │ ├── AccentColor.colorset/ │ │ │ └── Contents.json │ │ ├── AppIcon.appiconset/ │ │ │ └── Contents.json │ │ ├── Contents.json │ │ └── NeRFCaptureSplash.imageset/ │ │ └── Contents.json │ ├── DDSModel/ │ │ ├── NeRFCaptureData.c │ │ ├── NeRFCaptureData.h │ │ └── NeRFCaptureData.idl │ ├── DDSWriter.swift │ ├── DatasetWriter.swift │ ├── Info.plist │ ├── Models/ │ │ ├── AppState.swift │ │ └── Manifest.swift │ ├── Networking/ │ │ └── CycloneDDS-Bridging-Header.h │ ├── Preview Content/ │ │ └── Preview Assets.xcassets/ │ │ └── Contents.json │ ├── Shaders.metal │ ├── Utils.swift │ ├── ViewModels/ │ │ └── ARViewModel.swift │ └── Views/ │ ├── ARViewContainer.swift │ └── ContentView.swift ├── NeRFCapture.xcodeproj/ │ ├── project.pbxproj │ ├── project.xcworkspace/ │ │ ├── contents.xcworkspacedata │ │ └── xcshareddata/ │ │ ├── IDEWorkspaceChecks.plist │ │ ├── WorkspaceSettings.xcsettings │ │ └── swiftpm/ │ │ └── Package.resolved │ └── xcshareddata/ │ └── xcschemes/ │ └── NeRFCapture.xcscheme └── README.md
SYMBOL INDEX (3 symbols across 1 files) FILE: NeRFCapture/DDSModel/NeRFCaptureData.h type dds_sequence_octet (line 19) | typedef struct dds_sequence_octet type dds_sequence_octet (line 36) | typedef struct dds_sequence_octet type NeRFCaptureData_NeRFCaptureFrame (line 51) | typedef struct NeRFCaptureData_NeRFCaptureFrame
Condensed preview — 30 files, each showing path, character count, and a content snippet. Download the .json file or copy for the full structured content (93K chars).
[
{
"path": ".gitignore",
"chars": 2050,
"preview": "## User settings\nxcuserdata/\n\n## compatibility with Xcode 8 and earlier (ignoring not required starting Xcode 9)\n*.xcscm"
},
{
"path": "CITATION.cff",
"chars": 304,
"preview": "cff-version: 1.2.0\nmessage: \"If you use this software, please cite it as below.\"\nauthors:\n- family-names: \"Abou-Chakra\"\n"
},
{
"path": "LICENCE",
"chars": 1073,
"preview": "MIT License\n\nCopyright (c) 2023 Jad Abou-Chakra \n\nPermission is hereby granted, free of charge, to any person obtaining "
},
{
"path": "NeRFCapture/AppDelegate.swift",
"chars": 3040,
"preview": "//\n// AppDelegate.swift\n// NeRFCapture\n//\n// Created by Jad Abou-Chakra on 13/7/2022.\n//\n\nimport UIKit\nimport SwiftUI"
},
{
"path": "NeRFCapture/Assets.xcassets/AccentColor.colorset/Contents.json",
"chars": 326,
"preview": "{\n \"colors\" : [\n {\n \"color\" : {\n \"color-space\" : \"srgb\",\n \"components\" : {\n \"alpha\" : \"1"
},
{
"path": "NeRFCapture/Assets.xcassets/AppIcon.appiconset/Contents.json",
"chars": 3161,
"preview": "{\n \"images\" : [\n {\n \"filename\" : \"Icon-40.png\",\n \"idiom\" : \"iphone\",\n \"scale\" : \"2x\",\n \"size\" : "
},
{
"path": "NeRFCapture/Assets.xcassets/Contents.json",
"chars": 63,
"preview": "{\n \"info\" : {\n \"author\" : \"xcode\",\n \"version\" : 1\n }\n}\n"
},
{
"path": "NeRFCapture/Assets.xcassets/NeRFCaptureSplash.imageset/Contents.json",
"chars": 407,
"preview": "{\n \"images\" : [\n {\n \"filename\" : \"NeRFCaptureSplash 2.png\",\n \"idiom\" : \"universal\",\n \"scale\" : \"1x\"\n "
},
{
"path": "NeRFCapture/DDSModel/NeRFCaptureData.c",
"chars": 9286,
"preview": "/****************************************************************\n\n Generated by Eclipse Cyclone DDS IDL to C Translato"
},
{
"path": "NeRFCapture/DDSModel/NeRFCaptureData.h",
"chars": 2069,
"preview": "/****************************************************************\n\n Generated by Eclipse Cyclone DDS IDL to C Translato"
},
{
"path": "NeRFCapture/DDSModel/NeRFCaptureData.idl",
"chars": 442,
"preview": "module NeRFCaptureData\n{\n @final\n struct NeRFCaptureFrame\n {\n @key\n uint32 id;\n double timestamp"
},
{
"path": "NeRFCapture/DDSWriter.swift",
"chars": 5779,
"preview": "//\n// DDSWriter.swift\n// NeRFCapture\n//\n// Created by Jad Abou-Chakra on 11/1/2023.\n//\n\nimport Foundation\nimport ARKi"
},
{
"path": "NeRFCapture/DatasetWriter.swift",
"chars": 6318,
"preview": "//\n// DatasetWriter.swift\n// NeRFCapture\n//\n// Created by Jad Abou-Chakra on 11/1/2023.\n//\n\nimport Foundation\nimport "
},
{
"path": "NeRFCapture/Info.plist",
"chars": 429,
"preview": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<!DOCTYPE plist PUBLIC \"-//Apple//DTD PLIST 1.0//EN\" \"http://www.apple.com/DTDs/P"
},
{
"path": "NeRFCapture/Models/AppState.swift",
"chars": 1017,
"preview": "//\n// AppState.swift\n// NeRFCapture\n//\n// Created by Jad Abou-Chakra on 13/7/2022.\n//\n\nimport Foundation\nimport Metal"
},
{
"path": "NeRFCapture/Models/Manifest.swift",
"chars": 665,
"preview": "//\n// Manifest.swift\n// NeRFCapture\n//\n// Created by Jad Abou-Chakra on 13/7/2022.\n//\n\nimport Foundation\n\nstruct Mani"
},
{
"path": "NeRFCapture/Networking/CycloneDDS-Bridging-Header.h",
"chars": 389,
"preview": "//\n// CycloneDDS-Bridging-Header.h\n// NeRFCapture\n//\n// Created by Jad Abou-Chakra on 9/12/2022.\n//\n\n#ifndef CycloneD"
},
{
"path": "NeRFCapture/Preview Content/Preview Assets.xcassets/Contents.json",
"chars": 62,
"preview": "{\n \"info\" : {\n \"version\" : 1,\n \"author\" : \"xcode\"\n }\n}"
},
{
"path": "NeRFCapture/Shaders.metal",
"chars": 1661,
"preview": "//\n// Shaders.metal\n// NeRFCapture\n//\n// Created by Jad Abou-Chakra on 19/12/2022.\n//\n\n#include <metal_stdlib>\nusing "
},
{
"path": "NeRFCapture/Utils.swift",
"chars": 6456,
"preview": "//\n// Utils.swift\n// NeRFCapture\n//\n// Created by Jad Abou-Chakra on 13/7/2022.\n//\n\nimport Foundation\nimport ARKit\n\nf"
},
{
"path": "NeRFCapture/ViewModels/ARViewModel.swift",
"chars": 2839,
"preview": "//\n// ARViewModel.swift\n// NeRFCapture\n//\n// Created by Jad Abou-Chakra on 13/7/2022.\n//\n\nimport Foundation\nimport Zi"
},
{
"path": "NeRFCapture/Views/ARViewContainer.swift",
"chars": 1161,
"preview": "//\n// ARView.swift\n// NeRFCapture\n//\n// Created by Jad Abou-Chakra on 13/7/2022.\n//\n\nimport SwiftUI\nimport RealityKit"
},
{
"path": "NeRFCapture/Views/ContentView.swift",
"chars": 7352,
"preview": "//\n// ContentView.swift\n// NeRFCapture\n//\n// Created by Jad Abou-Chakra on 13/7/2022.\n//\n\nimport SwiftUI\nimport ARKit"
},
{
"path": "NeRFCapture.xcodeproj/project.pbxproj",
"chars": 22852,
"preview": "// !$*UTF8*$!\n{\n\tarchiveVersion = 1;\n\tclasses = {\n\t};\n\tobjectVersion = 55;\n\tobjects = {\n\n/* Begin PBXBuildFile section *"
},
{
"path": "NeRFCapture.xcodeproj/project.xcworkspace/contents.xcworkspacedata",
"chars": 135,
"preview": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<Workspace\n version = \"1.0\">\n <FileRef\n location = \"self:\">\n </FileRef"
},
{
"path": "NeRFCapture.xcodeproj/project.xcworkspace/xcshareddata/IDEWorkspaceChecks.plist",
"chars": 238,
"preview": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<!DOCTYPE plist PUBLIC \"-//Apple//DTD PLIST 1.0//EN\" \"http://www.apple.com/DTDs/P"
},
{
"path": "NeRFCapture.xcodeproj/project.xcworkspace/xcshareddata/WorkspaceSettings.xcsettings",
"chars": 226,
"preview": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<!DOCTYPE plist PUBLIC \"-//Apple//DTD PLIST 1.0//EN\" \"http://www.apple.com/DTDs/P"
},
{
"path": "NeRFCapture.xcodeproj/project.xcworkspace/xcshareddata/swiftpm/Package.resolved",
"chars": 567,
"preview": "{\n \"pins\" : [\n {\n \"identity\" : \"cycloneddsprebuild\",\n \"kind\" : \"remoteSourceControl\",\n \"location\" : \""
},
{
"path": "NeRFCapture.xcodeproj/xcshareddata/xcschemes/NeRFCapture.xcscheme",
"chars": 2876,
"preview": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<Scheme\n LastUpgradeVersion = \"1410\"\n version = \"1.3\">\n <BuildAction\n "
},
{
"path": "README.md",
"chars": 2852,
"preview": "# NeRF Capture \n<img src=\"docs/assets_readme/NeRFCaptureReal.png\" height=\"342\"/><img src=\"docs/assets_readme/NeRFCapture"
}
]
About this extraction
This page contains the full source code of the jc211/NeRFCapture GitHub repository, extracted and formatted as plain text for AI agents and large language models (LLMs). The extraction includes 30 files (84.1 KB), approximately 27.5k tokens, and a symbol index with 3 extracted functions, classes, methods, constants, and types. Use this with OpenClaw, Claude, ChatGPT, Cursor, Windsurf, or any other AI tool that accepts text input. You can copy the full output to your clipboard or download it as a .txt file.
Extracted by GitExtract — free GitHub repo to text converter for AI. Built by Nikandr Surkov.