Repository: yulingtianxia/Core-ML-Sample
Branch: master
Commit: 7dd7df33219d
Files: 16
Total size: 90.4 MB
Directory structure:
gitextract_ixdvp4cm/
├── .gitignore
├── CoreMLSample/
│ ├── AppDelegate.swift
│ ├── Assets.xcassets/
│ │ └── AppIcon.appiconset/
│ │ └── Contents.json
│ ├── Base.lproj/
│ │ ├── LaunchScreen.storyboard
│ │ └── Main.storyboard
│ ├── Inceptionv3.mlmodel
│ ├── Info.plist
│ ├── Main.storyboard
│ ├── VideoCapture/
│ │ ├── AVCaptureDevice+Extension.swift
│ │ ├── VideoCameraType.swift
│ │ └── VideoCapture.swift
│ └── ViewController.swift
├── CoreMLSample.xcodeproj/
│ ├── project.pbxproj
│ └── project.xcworkspace/
│ └── contents.xcworkspacedata
├── LICENSE
└── README.md
================================================
FILE CONTENTS
================================================
================================================
FILE: .gitignore
================================================
# Xcode
#
# gitignore contributors: remember to update Global/Xcode.gitignore, Objective-C.gitignore & Swift.gitignore
## Build generated
build/
DerivedData/
## Various settings
*.pbxuser
!default.pbxuser
*.mode1v3
!default.mode1v3
*.mode2v3
!default.mode2v3
*.perspectivev3
!default.perspectivev3
xcuserdata/
## Other
*.moved-aside
*.xccheckout
*.xcscmblueprint
## Obj-C/Swift specific
*.hmap
*.ipa
*.dSYM.zip
*.dSYM
## Playgrounds
timeline.xctimeline
playground.xcworkspace
# Swift Package Manager
#
# Add this line if you want to avoid checking in source code from Swift Package Manager dependencies.
# Packages/
# Package.pins
.build/
# CocoaPods
#
# We recommend against adding the Pods directory to your .gitignore. However
# you should judge for yourself, the pros and cons are mentioned at:
# https://guides.cocoapods.org/using/using-cocoapods.html#should-i-check-the-pods-directory-into-source-control
#
# Pods/
# Carthage
#
# Add this line if you want to avoid checking in source code from Carthage dependencies.
# Carthage/Checkouts
Carthage/Build
# fastlane
#
# It is recommended to not store the screenshots in the git repo. Instead, use fastlane to re-generate the
# screenshots whenever they are needed.
# For more information about the recommended setup visit:
# https://docs.fastlane.tools/best-practices/source-control/#source-control
fastlane/report.xml
fastlane/Preview.html
fastlane/screenshots
fastlane/test_output
================================================
FILE: CoreMLSample/AppDelegate.swift
================================================
//
// AppDelegate.swift
// CoreMLSample
//
// Created by 杨萧玉 on 2017/6/9.
// Copyright © 2017年 杨萧玉. All rights reserved.
//
import UIKit
/// Application delegate. Standard UIKit lifecycle boilerplate with no custom
/// launch logic; the UI is loaded entirely from Main.storyboard.
@UIApplicationMain
class AppDelegate: UIResponder, UIApplicationDelegate {
    /// The app's main window, populated by UIKit from the main storyboard.
    var window: UIWindow?
    func application(_ application: UIApplication, didFinishLaunchingWithOptions launchOptions: [UIApplicationLaunchOptionsKey: Any]?) -> Bool {
        // Override point for customization after application launch.
        return true
    }
    func applicationWillResignActive(_ application: UIApplication) {
        // Sent when the application is about to move from active to inactive state. This can occur for certain types of temporary interruptions (such as an incoming phone call or SMS message) or when the user quits the application and it begins the transition to the background state.
        // Use this method to pause ongoing tasks, disable timers, and invalidate graphics rendering callbacks. Games should use this method to pause the game.
    }
    func applicationDidEnterBackground(_ application: UIApplication) {
        // Use this method to release shared resources, save user data, invalidate timers, and store enough application state information to restore your application to its current state in case it is terminated later.
        // If your application supports background execution, this method is called instead of applicationWillTerminate: when the user quits.
    }
    func applicationWillEnterForeground(_ application: UIApplication) {
        // Called as part of the transition from the background to the active state; here you can undo many of the changes made on entering the background.
    }
    func applicationDidBecomeActive(_ application: UIApplication) {
        // Restart any tasks that were paused (or not yet started) while the application was inactive. If the application was previously in the background, optionally refresh the user interface.
    }
    func applicationWillTerminate(_ application: UIApplication) {
        // Called when the application is about to terminate. Save data if appropriate. See also applicationDidEnterBackground:.
    }
}
================================================
FILE: CoreMLSample/Assets.xcassets/AppIcon.appiconset/Contents.json
================================================
{
"images" : [
{
"idiom" : "iphone",
"size" : "20x20",
"scale" : "2x"
},
{
"idiom" : "iphone",
"size" : "20x20",
"scale" : "3x"
},
{
"idiom" : "iphone",
"size" : "29x29",
"scale" : "2x"
},
{
"idiom" : "iphone",
"size" : "29x29",
"scale" : "3x"
},
{
"idiom" : "iphone",
"size" : "40x40",
"scale" : "2x"
},
{
"idiom" : "iphone",
"size" : "40x40",
"scale" : "3x"
},
{
"idiom" : "iphone",
"size" : "60x60",
"scale" : "2x"
},
{
"size" : "60x60",
"idiom" : "iphone",
"filename" : "core-ml-128x128_2x.png",
"scale" : "3x"
},
{
"idiom" : "ipad",
"size" : "20x20",
"scale" : "1x"
},
{
"idiom" : "ipad",
"size" : "20x20",
"scale" : "2x"
},
{
"idiom" : "ipad",
"size" : "29x29",
"scale" : "1x"
},
{
"idiom" : "ipad",
"size" : "29x29",
"scale" : "2x"
},
{
"idiom" : "ipad",
"size" : "40x40",
"scale" : "1x"
},
{
"idiom" : "ipad",
"size" : "40x40",
"scale" : "2x"
},
{
"idiom" : "ipad",
"size" : "76x76",
"scale" : "1x"
},
{
"idiom" : "ipad",
"size" : "76x76",
"scale" : "2x"
},
{
"idiom" : "ipad",
"size" : "83.5x83.5",
"scale" : "2x"
},
{
"idiom" : "ios-marketing",
"size" : "1024x1024",
"scale" : "1x"
}
],
"info" : {
"version" : 1,
"author" : "xcode"
}
}
================================================
FILE: CoreMLSample/Base.lproj/LaunchScreen.storyboard
================================================
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<document type="com.apple.InterfaceBuilder3.CocoaTouch.Storyboard.XIB" version="3.0" toolsVersion="11134" systemVersion="15F34" targetRuntime="iOS.CocoaTouch" propertyAccessControl="none" useAutolayout="YES" launchScreen="YES" useTraitCollections="YES" colorMatched="YES" initialViewController="01J-lp-oVM">
<dependencies>
<plugIn identifier="com.apple.InterfaceBuilder.IBCocoaTouchPlugin" version="11106"/>
<capability name="documents saved in the Xcode 8 format" minToolsVersion="8.0"/>
</dependencies>
<scenes>
<!--View Controller-->
<scene sceneID="EHf-IW-A2E">
<objects>
<viewController id="01J-lp-oVM" sceneMemberID="viewController">
<layoutGuides>
<viewControllerLayoutGuide type="top" id="Llm-lL-Icb"/>
<viewControllerLayoutGuide type="bottom" id="xb3-aO-Qok"/>
</layoutGuides>
<view key="view" contentMode="scaleToFill" id="Ze5-6b-2t3">
<rect key="frame" x="0.0" y="0.0" width="375" height="667"/>
<autoresizingMask key="autoresizingMask" widthSizable="YES" heightSizable="YES"/>
<color key="backgroundColor" red="1" green="1" blue="1" alpha="1" colorSpace="custom" customColorSpace="sRGB"/>
</view>
</viewController>
<placeholder placeholderIdentifier="IBFirstResponder" id="iYj-Kq-Ea1" userLabel="First Responder" sceneMemberID="firstResponder"/>
</objects>
<point key="canvasLocation" x="53" y="375"/>
</scene>
</scenes>
</document>
================================================
FILE: CoreMLSample/Base.lproj/Main.storyboard
================================================
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<document type="com.apple.InterfaceBuilder3.CocoaTouch.Storyboard.XIB" version="3.0" toolsVersion="11134" systemVersion="15F34" targetRuntime="iOS.CocoaTouch" propertyAccessControl="none" useAutolayout="YES" useTraitCollections="YES" colorMatched="YES" initialViewController="BYZ-38-t0r">
<dependencies>
<plugIn identifier="com.apple.InterfaceBuilder.IBCocoaTouchPlugin" version="11106"/>
<capability name="documents saved in the Xcode 8 format" minToolsVersion="8.0"/>
</dependencies>
<scenes>
<!--View Controller-->
<scene sceneID="tne-QT-ifu">
<objects>
<viewController id="BYZ-38-t0r" customClass="ViewController" customModuleProvider="target" sceneMemberID="viewController">
<layoutGuides>
<viewControllerLayoutGuide type="top" id="y3c-jy-aDJ"/>
<viewControllerLayoutGuide type="bottom" id="wfy-db-euE"/>
</layoutGuides>
<view key="view" contentMode="scaleToFill" id="8bC-Xf-vdC">
<rect key="frame" x="0.0" y="0.0" width="375" height="667"/>
<autoresizingMask key="autoresizingMask" widthSizable="YES" heightSizable="YES"/>
<color key="backgroundColor" red="1" green="1" blue="1" alpha="1" colorSpace="custom" customColorSpace="sRGB"/>
</view>
</viewController>
<placeholder placeholderIdentifier="IBFirstResponder" id="dkx-z0-nzr" sceneMemberID="firstResponder"/>
</objects>
</scene>
</scenes>
</document>
================================================
FILE: CoreMLSample/Inceptionv3.mlmodel
================================================
[File too large to display: 90.3 MB]
================================================
FILE: CoreMLSample/Info.plist
================================================
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>CFBundleDevelopmentRegion</key>
<string>$(DEVELOPMENT_LANGUAGE)</string>
<key>CFBundleExecutable</key>
<string>$(EXECUTABLE_NAME)</string>
<key>CFBundleIdentifier</key>
<string>$(PRODUCT_BUNDLE_IDENTIFIER)</string>
<key>CFBundleInfoDictionaryVersion</key>
<string>6.0</string>
<key>CFBundleName</key>
<string>$(PRODUCT_NAME)</string>
<key>CFBundlePackageType</key>
<string>APPL</string>
<key>CFBundleShortVersionString</key>
<string>1.0</string>
<key>CFBundleVersion</key>
<string>1</string>
<key>LSRequiresIPhoneOS</key>
<true/>
<key>UILaunchStoryboardName</key>
<string>LaunchScreen</string>
<key>UIMainStoryboardFile</key>
<string>Main</string>
<key>UIRequiredDeviceCapabilities</key>
<array>
<string>armv7</string>
</array>
<key>UISupportedInterfaceOrientations</key>
<array>
<string>UIInterfaceOrientationPortrait</string>
<string>UIInterfaceOrientationLandscapeLeft</string>
<string>UIInterfaceOrientationLandscapeRight</string>
</array>
<key>UISupportedInterfaceOrientations~ipad</key>
<array>
<string>UIInterfaceOrientationPortrait</string>
<string>UIInterfaceOrientationPortraitUpsideDown</string>
<string>UIInterfaceOrientationLandscapeLeft</string>
<string>UIInterfaceOrientationLandscapeRight</string>
</array>
<key>NSCameraUsageDescription</key>
<string>to take photos and video</string>
</dict>
</plist>
================================================
FILE: CoreMLSample/Main.storyboard
================================================
<?xml version="1.0" encoding="UTF-8"?>
<document type="com.apple.InterfaceBuilder3.CocoaTouch.Storyboard.XIB" version="3.0" toolsVersion="13122.17" systemVersion="16E195" targetRuntime="iOS.CocoaTouch" propertyAccessControl="none" useAutolayout="YES" useTraitCollections="YES" colorMatched="YES" initialViewController="0Xl-yk-nao">
<device id="retina4_7" orientation="portrait">
<adaptation id="fullscreen"/>
</device>
<dependencies>
<plugIn identifier="com.apple.InterfaceBuilder.IBCocoaTouchPlugin" version="13104.14"/>
<capability name="documents saved in the Xcode 8 format" minToolsVersion="8.0"/>
</dependencies>
<customFonts key="customFonts">
<array key="Menlo.ttc">
<string>Menlo-Bold</string>
</array>
</customFonts>
<scenes>
<!--View Controller-->
<scene sceneID="71d-m2-KJ4">
<objects>
<viewController id="0Xl-yk-nao" customClass="ViewController" customModule="CoreMLSample" sceneMemberID="viewController">
<layoutGuides>
<viewControllerLayoutGuide type="top" id="Cze-I2-EGa"/>
<viewControllerLayoutGuide type="bottom" id="Z5a-YL-kYA"/>
</layoutGuides>
<view key="view" contentMode="scaleToFill" id="f5H-zY-hKe">
<rect key="frame" x="0.0" y="0.0" width="375" height="667"/>
<autoresizingMask key="autoresizingMask" widthSizable="YES" heightSizable="YES"/>
<subviews>
<view contentMode="scaleToFill" translatesAutoresizingMaskIntoConstraints="NO" id="MPW-dG-rVE">
<rect key="frame" x="0.0" y="20" width="375" height="647"/>
<subviews>
<switch opaque="NO" contentMode="scaleToFill" horizontalHuggingPriority="750" verticalHuggingPriority="750" contentHorizontalAlignment="center" contentVerticalAlignment="center" on="YES" translatesAutoresizingMaskIntoConstraints="NO" id="qdg-1O-NKq">
<rect key="frame" x="318" y="553" width="51" height="31"/>
</switch>
<label opaque="NO" userInteractionEnabled="NO" contentMode="left" horizontalHuggingPriority="251" verticalHuggingPriority="251" text="Vision" textAlignment="natural" lineBreakMode="tailTruncation" baselineAdjustment="alignBaselines" adjustsFontSizeToFit="NO" translatesAutoresizingMaskIntoConstraints="NO" id="uyK-aH-yyg">
<rect key="frame" x="263.5" y="558" width="46.5" height="21"/>
<constraints>
<constraint firstAttribute="height" constant="21" id="KMT-Ze-6dK"/>
<constraint firstAttribute="width" constant="46.5" id="XB0-OT-bio"/>
</constraints>
<fontDescription key="fontDescription" type="system" pointSize="17"/>
<nil key="textColor"/>
<nil key="highlightedColor"/>
</label>
</subviews>
<color key="backgroundColor" white="1" alpha="1" colorSpace="calibratedWhite"/>
<constraints>
<constraint firstAttribute="trailing" secondItem="qdg-1O-NKq" secondAttribute="trailing" constant="8" id="c54-zn-nxu"/>
<constraint firstItem="qdg-1O-NKq" firstAttribute="leading" secondItem="uyK-aH-yyg" secondAttribute="trailing" constant="8" id="thV-tc-dyS"/>
<constraint firstItem="uyK-aH-yyg" firstAttribute="centerY" secondItem="qdg-1O-NKq" secondAttribute="centerY" id="uKu-Y1-Pac"/>
</constraints>
</view>
<view contentMode="scaleToFill" translatesAutoresizingMaskIntoConstraints="NO" id="xc3-W5-qAq">
<rect key="frame" x="4" y="612" width="367" height="51"/>
<subviews>
<label opaque="NO" contentMode="left" horizontalHuggingPriority="251" verticalHuggingPriority="251" text="Label" lineBreakMode="tailTruncation" numberOfLines="100" baselineAdjustment="alignBaselines" adjustsFontSizeToFit="NO" translatesAutoresizingMaskIntoConstraints="NO" id="hpR-SF-YSn">
<rect key="frame" x="16" y="16" width="335" height="19"/>
<fontDescription key="fontDescription" name="Menlo-Bold" family="Menlo" pointSize="16"/>
<color key="textColor" red="1" green="1" blue="1" alpha="1" colorSpace="custom" customColorSpace="sRGB"/>
<nil key="highlightedColor"/>
</label>
</subviews>
<color key="backgroundColor" white="0.0" alpha="0.5" colorSpace="calibratedWhite"/>
<constraints>
<constraint firstAttribute="bottom" secondItem="hpR-SF-YSn" secondAttribute="bottom" constant="16" id="DvO-4t-Nir"/>
<constraint firstItem="hpR-SF-YSn" firstAttribute="leading" secondItem="xc3-W5-qAq" secondAttribute="leading" constant="16" id="QeY-hJ-Cwm"/>
<constraint firstItem="hpR-SF-YSn" firstAttribute="top" secondItem="xc3-W5-qAq" secondAttribute="top" constant="16" id="fRn-HZ-A7M"/>
<constraint firstAttribute="trailing" secondItem="hpR-SF-YSn" secondAttribute="trailing" constant="16" id="odX-Yc-N24"/>
</constraints>
<userDefinedRuntimeAttributes>
<userDefinedRuntimeAttribute type="number" keyPath="layer.cornerRadius">
<integer key="value" value="2"/>
</userDefinedRuntimeAttribute>
</userDefinedRuntimeAttributes>
</view>
</subviews>
<color key="backgroundColor" white="1" alpha="1" colorSpace="calibratedWhite"/>
<constraints>
<constraint firstAttribute="trailing" secondItem="xc3-W5-qAq" secondAttribute="trailing" constant="4" id="GbP-lM-qpb"/>
<constraint firstAttribute="trailing" secondItem="MPW-dG-rVE" secondAttribute="trailing" id="GpE-Xt-H2b"/>
<constraint firstItem="MPW-dG-rVE" firstAttribute="top" secondItem="Cze-I2-EGa" secondAttribute="bottom" id="I48-Xy-1Mx"/>
<constraint firstAttribute="bottom" secondItem="MPW-dG-rVE" secondAttribute="bottom" id="N7z-Ew-IAo"/>
<constraint firstItem="Z5a-YL-kYA" firstAttribute="top" secondItem="xc3-W5-qAq" secondAttribute="bottom" constant="4" id="Q6h-tE-Uil"/>
<constraint firstItem="xc3-W5-qAq" firstAttribute="top" secondItem="qdg-1O-NKq" secondAttribute="bottom" constant="8" id="f8w-o7-6xF"/>
<constraint firstItem="MPW-dG-rVE" firstAttribute="leading" secondItem="f5H-zY-hKe" secondAttribute="leading" id="kVi-v1-w6X"/>
<constraint firstItem="xc3-W5-qAq" firstAttribute="leading" secondItem="f5H-zY-hKe" secondAttribute="leading" constant="4" id="qBL-IY-3ly"/>
</constraints>
</view>
<connections>
<outlet property="predictLabel" destination="hpR-SF-YSn" id="ac2-lY-Lsw"/>
<outlet property="previewView" destination="MPW-dG-rVE" id="6ZR-uF-fDC"/>
<outlet property="visionSwitch" destination="qdg-1O-NKq" id="466-ph-1qm"/>
</connections>
</viewController>
<placeholder placeholderIdentifier="IBFirstResponder" id="8f5-Pe-2NG" userLabel="First Responder" sceneMemberID="firstResponder"/>
</objects>
<point key="canvasLocation" x="36" y="-57.121439280359823"/>
</scene>
</scenes>
</document>
================================================
FILE: CoreMLSample/VideoCapture/AVCaptureDevice+Extension.swift
================================================
//
// AVCaptureDevice+Extension.swift
// CoreMLSample
//
// Created by 杨萧玉 on 2017/6/9.
// Copyright © 2017年 杨萧玉. All rights reserved.
// Based on Shuichi Tsutsumi's Code
import AVFoundation
extension AVCaptureDevice {
    /// Returns every format whose supported frame-rate ranges contain `preferredFps`.
    private func availableFormatsFor(preferredFps: Float64) -> [AVCaptureDevice.Format] {
        var availableFormats: [AVCaptureDevice.Format] = []
        for format in formats {
            let ranges = format.videoSupportedFrameRateRanges
            for range in ranges where range.minFrameRate <= preferredFps && preferredFps <= range.maxFrameRate {
                availableFormats.append(format)
            }
        }
        return availableFormats
    }

    /// Returns the format with the largest frame width, or nil when `availableFormats` is empty.
    private func formatWithHighestResolution(_ availableFormats: [AVCaptureDevice.Format]) -> AVCaptureDevice.Format? {
        var maxWidth: Int32 = 0
        var selectedFormat: AVCaptureDevice.Format?
        for format in availableFormats {
            let desc = format.formatDescription
            let dimensions = CMVideoFormatDescriptionGetDimensions(desc)
            let width = dimensions.width
            if width >= maxWidth {
                maxWidth = width
                selectedFormat = format
            }
        }
        return selectedFormat
    }

    /// Returns the first format at least as large as `preferredSize` in both
    /// dimensions, or nil when none qualifies.
    private func formatFor(preferredSize: CGSize, availableFormats: [AVCaptureDevice.Format]) -> AVCaptureDevice.Format? {
        for format in availableFormats {
            let desc = format.formatDescription
            let dimensions = CMVideoFormatDescriptionGetDimensions(desc)
            if dimensions.width >= Int32(preferredSize.width) && dimensions.height >= Int32(preferredSize.height) {
                return format
            }
        }
        return nil
    }

    /// Selects and activates the capture format that best matches `preferredSpec`:
    /// filters by fps when one is given, then picks either the first format
    /// satisfying the preferred size or the highest-resolution format.
    func updateFormatWithPreferredVideoSpec(preferredSpec: VideoSpec) {
        let availableFormats: [AVCaptureDevice.Format]
        if let preferredFps = preferredSpec.fps {
            availableFormats = availableFormatsFor(preferredFps: Float64(preferredFps))
        } else {
            availableFormats = formats
        }
        var selectedFormat: AVCaptureDevice.Format?
        if let preferredSize = preferredSpec.size {
            selectedFormat = formatFor(preferredSize: preferredSize, availableFormats: availableFormats)
        } else {
            selectedFormat = formatWithHighestResolution(availableFormats)
        }
        print("selected format: \(String(describing: selectedFormat))")
        if let selectedFormat = selectedFormat {
            do {
                try lockForConfiguration()
            } catch {
                // BUG FIX: the original crashed with fatalError("") — an empty,
                // useless diagnostic. Include the underlying error.
                fatalError("Could not lock the capture device for configuration: \(error)")
            }
            activeFormat = selectedFormat
            if let preferredFps = preferredSpec.fps {
                activeVideoMinFrameDuration = CMTimeMake(1, preferredFps)
                activeVideoMaxFrameDuration = CMTimeMake(1, preferredFps)
            }
            // BUG FIX: unlockForConfiguration() was inside the `if let preferredFps`
            // branch above, so a spec without an fps left the device locked forever.
            unlockForConfiguration()
        }
    }
}
================================================
FILE: CoreMLSample/VideoCapture/VideoCameraType.swift
================================================
//
// VideoCameraType.swift
// CoreMLSample
//
// Created by 杨萧玉 on 2017/6/9.
// Copyright © 2017年 杨萧玉. All rights reserved.
// Based on Shuichi Tsutsumi's Code
import AVFoundation
/// Selects which physical camera (back or front) to capture from.
enum CameraType : Int {
    case back
    case front

    /// Returns a capture device matching this camera position, falling back to
    /// the system default video device.
    func captureDevice() -> AVCaptureDevice {
        switch self {
        case .front:
            // BUG FIX: the original passed `deviceTypes: []`, which makes the
            // discovery session return no devices at all, so the front camera
            // was never found. Request the built-in wide-angle camera explicitly.
            let devices = AVCaptureDevice.DiscoverySession(deviceTypes: [.builtInWideAngleCamera], mediaType: AVMediaType.video, position: .front).devices
            print("devices:\(devices)")
            for device in devices where device.position == .front {
                return device
            }
        default:
            break
        }
        // BUG FIX: replace the silent force-unwrap with a descriptive failure
        // (nil is only possible on hardware with no camera, e.g. the simulator).
        guard let device = AVCaptureDevice.default(for: AVMediaType.video) else {
            fatalError("No video capture device is available on this device.")
        }
        return device
    }
}
================================================
FILE: CoreMLSample/VideoCapture/VideoCapture.swift
================================================
//
// VideoCapture.swift
// CoreMLSample
//
// Created by 杨萧玉 on 2017/6/9.
// Copyright © 2017年 杨萧玉. All rights reserved.
// Based on Shuichi Tsutsumi's Code
import AVFoundation
import Foundation
/// Preferred capture parameters; a nil field means "no preference".
struct VideoSpec {
    /// Preferred frame rate in frames per second.
    var fps: Int32?
    /// Preferred minimum capture resolution (width x height in pixels).
    var size: CGSize?
}
/// Handler invoked (on the capture queue) with each captured video sample buffer.
typealias ImageBufferHandler = ((_ imageBuffer: CMSampleBuffer) -> ())
/// Owns an AVCaptureSession configured for BGRA video frames, optionally shows
/// a live preview layer, and forwards every captured sample buffer to
/// `imageBufferHandler`.
class VideoCapture: NSObject, AVCaptureVideoDataOutputSampleBufferDelegate {
    private let captureSession = AVCaptureSession()
    private var videoDevice: AVCaptureDevice!
    private var videoConnection: AVCaptureConnection!
    private var audioConnection: AVCaptureConnection!
    private var previewLayer: AVCaptureVideoPreviewLayer?

    /// Called on the capture queue for each video frame; set by the client.
    var imageBufferHandler: ImageBufferHandler?

    /// - Parameters:
    ///   - cameraType: which camera to capture from (back or front).
    ///   - preferredSpec: desired fps/resolution; nil keeps the device defaults.
    ///   - previewContainer: layer to host a live preview; nil for no preview.
    init(cameraType: CameraType, preferredSpec: VideoSpec?, previewContainer: CALayer?)
    {
        super.init()
        videoDevice = cameraType.captureDevice()
        // setup video format
        do {
            // inputPriority lets the device's active format drive the session.
            captureSession.sessionPreset = AVCaptureSession.Preset.inputPriority
            if let preferredSpec = preferredSpec {
                // update the format with a preferred fps
                videoDevice.updateFormatWithPreferredVideoSpec(preferredSpec: preferredSpec)
            }
        }
        // setup video device input
        do {
            let videoDeviceInput: AVCaptureDeviceInput
            do {
                videoDeviceInput = try AVCaptureDeviceInput(device: videoDevice)
            }
            catch {
                fatalError("Could not create AVCaptureDeviceInput instance with error: \(error).")
            }
            guard captureSession.canAddInput(videoDeviceInput) else {
                // BUG FIX: bare fatalError() gave no diagnostic at all.
                fatalError("Capture session cannot accept the video device input.")
            }
            captureSession.addInput(videoDeviceInput)
        }
        // setup preview
        if let previewContainer = previewContainer {
            let previewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
            previewLayer.frame = previewContainer.bounds
            previewLayer.contentsGravity = kCAGravityResizeAspectFill
            previewLayer.videoGravity = AVLayerVideoGravity.resizeAspectFill
            previewContainer.insertSublayer(previewLayer, at: 0)
            self.previewLayer = previewLayer
        }
        // setup video output
        do {
            let videoDataOutput = AVCaptureVideoDataOutput()
            // BUG FIX: the original forced the key through
            // `as AnyHashable as! String`; the CFString key bridges to String directly.
            videoDataOutput.videoSettings = [kCVPixelBufferPixelFormatTypeKey as String: NSNumber(value: kCVPixelFormatType_32BGRA)]
            videoDataOutput.alwaysDiscardsLateVideoFrames = true
            let queue = DispatchQueue(label: "com.shu223.videosamplequeue")
            videoDataOutput.setSampleBufferDelegate(self, queue: queue)
            guard captureSession.canAddOutput(videoDataOutput) else {
                // BUG FIX: bare fatalError() gave no diagnostic at all.
                fatalError("Capture session cannot accept the video data output.")
            }
            captureSession.addOutput(videoDataOutput)
            videoConnection = videoDataOutput.connection(with: AVMediaType.video)
        }
    }

    /// Starts the session if it is not already running.
    func startCapture() {
        print("\(self.classForCoder)/" + #function)
        if captureSession.isRunning {
            print("already running")
            return
        }
        captureSession.startRunning()
    }

    /// Stops the session if it is currently running.
    func stopCapture() {
        print("\(self.classForCoder)/" + #function)
        if !captureSession.isRunning {
            print("already stopped")
            return
        }
        captureSession.stopRunning()
    }

    /// Re-fits the preview layer to its container after a layout change.
    func resizePreview() {
        if let previewLayer = previewLayer {
            guard let superlayer = previewLayer.superlayer else {return}
            previewLayer.frame = superlayer.bounds
        }
    }

    // =========================================================================
    // MARK: - AVCaptureVideoDataOutputSampleBufferDelegate
    func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
        // Force portrait orientation; the first frame after changing it is dropped
        // because its pixel data still has the old orientation.
        if connection.videoOrientation != .portrait {
            connection.videoOrientation = .portrait
            return
        }
        if let imageBufferHandler = imageBufferHandler
        {
            imageBufferHandler(sampleBuffer)
        }
    }
}
================================================
FILE: CoreMLSample/ViewController.swift
================================================
//
// ViewController.swift
// CoreMLSample
//
// Created by 杨萧玉 on 2017/6/9.
// Copyright © 2017年 杨萧玉. All rights reserved.
//
import UIKit
import CoreMedia
import Vision
/// Streams camera frames and classifies each one with Inceptionv3, either
/// directly through Core ML or through the Vision framework (toggled by
/// `visionSwitch`), showing the top predictions in `predictLabel`.
class ViewController: UIViewController, UIImagePickerControllerDelegate {
    // Outlets to label and view
    @IBOutlet private weak var predictLabel: UILabel!
    @IBOutlet private weak var previewView: UIView!
    @IBOutlet private weak var visionSwitch: UISwitch!
    // some properties used to control the app and store appropriate values
    let inceptionv3model = Inceptionv3()
    private var videoCapture: VideoCapture!
    private var requests = [VNRequest]()

    override func viewDidLoad() {
        super.viewDidLoad()
        setupVision()
        // 299x299 is the input size Inceptionv3 expects; 5 fps keeps inference load low.
        let spec = VideoSpec(fps: 5, size: CGSize(width: 299, height: 299))
        videoCapture = VideoCapture(cameraType: .back,
                                    preferredSpec: spec,
                                    previewContainer: previewView.layer)
        videoCapture.imageBufferHandler = {[unowned self] (imageBuffer) in
            if self.visionSwitch.isOn {
                // Use Vision
                self.handleImageBufferWithVision(imageBuffer: imageBuffer)
            }
            else {
                // Use Core ML
                self.handleImageBufferWithCoreML(imageBuffer: imageBuffer)
            }
        }
    }

    /// Classifies one frame by calling the generated Core ML model directly.
    func handleImageBufferWithCoreML(imageBuffer: CMSampleBuffer) {
        guard let pixelBuffer = CMSampleBufferGetImageBuffer(imageBuffer) else {
            return
        }
        // BUG FIX: the original force-unwrapped resize(...)!, crashing whenever
        // the pixel-buffer resize failed. Skip the frame instead.
        guard let resizedBuffer = resize(pixelBuffer: pixelBuffer) else {
            return
        }
        do {
            let prediction = try self.inceptionv3model.prediction(image: resizedBuffer)
            DispatchQueue.main.async {
                if let prob = prediction.classLabelProbs[prediction.classLabel] {
                    self.predictLabel.text = "\(prediction.classLabel) \(String(describing: prob))"
                }
            }
        }
        catch let error as NSError {
            // BUG FIX: a single failed frame prediction used to fatalError the
            // whole app (with "ocurred" misspelled); log and keep streaming.
            print("Unexpected error occurred: \(error.localizedDescription).")
        }
    }

    /// Classifies one frame by submitting it to the Vision requests built in setupVision().
    func handleImageBufferWithVision(imageBuffer: CMSampleBuffer) {
        guard let pixelBuffer = CMSampleBufferGetImageBuffer(imageBuffer) else {
            return
        }
        var requestOptions:[VNImageOption : Any] = [:]
        if let cameraIntrinsicData = CMGetAttachment(imageBuffer, kCMSampleBufferAttachmentKey_CameraIntrinsicMatrix, nil) {
            requestOptions = [.cameraIntrinsics:cameraIntrinsicData]
        }
        let imageRequestHandler = VNImageRequestHandler(cvPixelBuffer: pixelBuffer, orientation: CGImagePropertyOrientation(rawValue: UInt32(self.exifOrientationFromDeviceOrientation))!, options: requestOptions)
        do {
            try imageRequestHandler.perform(self.requests)
        } catch {
            print(error)
        }
    }

    /// Wraps the Core ML model in a Vision classification request whose results
    /// (top 5, confidence > 0.2) are rendered into `predictLabel`.
    func setupVision() {
        guard let visionModel = try? VNCoreMLModel(for: inceptionv3model.model) else {
            fatalError("can't load Vision ML model")
        }
        let classificationRequest = VNCoreMLRequest(model: visionModel) { (request: VNRequest, error: Error?) in
            guard let observations = request.results else {
                print("no results:\(error!)")
                return
            }
            // BUG FIX: `observations[0...4]` crashes when Vision returns fewer
            // than five results; prefix(5) takes at most five safely.
            let classifications = observations.prefix(5)
                .flatMap({ $0 as? VNClassificationObservation })
                .filter({ $0.confidence > 0.2 })
                .map({ "\($0.identifier) \($0.confidence)" })
            DispatchQueue.main.async {
                self.predictLabel.text = classifications.joined(separator: "\n")
            }
        }
        classificationRequest.imageCropAndScaleOption = VNImageCropAndScaleOption.centerCrop
        self.requests = [classificationRequest]
    }

    /// EXIF orientation for the current device orientation.
    /// only support back camera
    var exifOrientationFromDeviceOrientation: Int32 {
        let exifOrientation: DeviceOrientation
        enum DeviceOrientation: Int32 {
            case top0ColLeft = 1
            case top0ColRight = 2
            case bottom0ColRight = 3
            case bottom0ColLeft = 4
            case left0ColTop = 5
            case right0ColTop = 6
            case right0ColBottom = 7
            case left0ColBottom = 8
        }
        switch UIDevice.current.orientation {
        case .portraitUpsideDown:
            exifOrientation = .left0ColBottom
        case .landscapeLeft:
            exifOrientation = .top0ColLeft
        case .landscapeRight:
            exifOrientation = .bottom0ColRight
        default:
            exifOrientation = .right0ColTop
        }
        return exifOrientation.rawValue
    }

    /// resize CVPixelBuffer
    ///
    /// - Parameter pixelBuffer: CVPixelBuffer by camera output
    /// - Returns: CVPixelBuffer with size (299, 299), or nil if a buffer could not be created
    func resize(pixelBuffer: CVPixelBuffer) -> CVPixelBuffer? {
        let imageSide = 299
        var ciImage = CIImage(cvPixelBuffer: pixelBuffer, options: nil)
        let transform = CGAffineTransform(scaleX: CGFloat(imageSide) / CGFloat(CVPixelBufferGetWidth(pixelBuffer)), y: CGFloat(imageSide) / CGFloat(CVPixelBufferGetHeight(pixelBuffer)))
        ciImage = ciImage.transformed(by: transform).cropped(to: CGRect(x: 0, y: 0, width: imageSide, height: imageSide))
        let ciContext = CIContext()
        var resizeBuffer: CVPixelBuffer?
        CVPixelBufferCreate(kCFAllocatorDefault, imageSide, imageSide, CVPixelBufferGetPixelFormatType(pixelBuffer), nil, &resizeBuffer)
        // BUG FIX: CVPixelBufferCreate can fail; the original force-unwrapped
        // resizeBuffer! and crashed. Return nil so callers can skip the frame.
        guard let outputBuffer = resizeBuffer else {
            return nil
        }
        ciContext.render(ciImage, to: outputBuffer)
        return outputBuffer
    }

    override func viewWillAppear(_ animated: Bool) {
        super.viewWillAppear(animated)
        guard let videoCapture = videoCapture else {return}
        videoCapture.startCapture()
    }

    override func viewDidLayoutSubviews() {
        super.viewDidLayoutSubviews()
        guard let videoCapture = videoCapture else {return}
        videoCapture.resizePreview()
    }

    override func viewWillDisappear(_ animated: Bool) {
        guard let videoCapture = videoCapture else {return}
        videoCapture.stopCapture()
        navigationController?.setNavigationBarHidden(false, animated: true)
        super.viewWillDisappear(animated)
    }

    override func didReceiveMemoryWarning() {
        super.didReceiveMemoryWarning()
    }
}
================================================
FILE: CoreMLSample.xcodeproj/project.pbxproj
================================================
// !$*UTF8*$!
{
archiveVersion = 1;
classes = {
};
objectVersion = 48;
objects = {
/* Begin PBXBuildFile section */
A415CAB01EEA437900983607 /* AppDelegate.swift in Sources */ = {isa = PBXBuildFile; fileRef = A415CAAF1EEA437900983607 /* AppDelegate.swift */; };
A415CAB21EEA437900983607 /* ViewController.swift in Sources */ = {isa = PBXBuildFile; fileRef = A415CAB11EEA437900983607 /* ViewController.swift */; };
A415CAB71EEA437900983607 /* Assets.xcassets in Resources */ = {isa = PBXBuildFile; fileRef = A415CAB61EEA437900983607 /* Assets.xcassets */; };
A415CABA1EEA437900983607 /* LaunchScreen.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = A415CAB81EEA437900983607 /* LaunchScreen.storyboard */; };
A415CACF1EEA4B0300983607 /* Inceptionv3.mlmodel in Sources */ = {isa = PBXBuildFile; fileRef = A415CACE1EEA4AF300983607 /* Inceptionv3.mlmodel */; };
A415CAD61EEA4C0800983607 /* VideoCapture.swift in Sources */ = {isa = PBXBuildFile; fileRef = A415CAD31EEA4BA100983607 /* VideoCapture.swift */; };
A415CAD71EEA4C0D00983607 /* VideoCameraType.swift in Sources */ = {isa = PBXBuildFile; fileRef = A415CAD21EEA4BA100983607 /* VideoCameraType.swift */; };
A415CAD81EEA4C1000983607 /* AVCaptureDevice+Extension.swift in Sources */ = {isa = PBXBuildFile; fileRef = A415CAD11EEA4BA100983607 /* AVCaptureDevice+Extension.swift */; };
A415CAD91EEA4C1800983607 /* Main.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = A415CAD51EEA4BC900983607 /* Main.storyboard */; };
/* End PBXBuildFile section */
/* Begin PBXFileReference section */
A415CAAC1EEA437900983607 /* CoreMLSample.app */ = {isa = PBXFileReference; explicitFileType = wrapper.application; includeInIndex = 0; path = CoreMLSample.app; sourceTree = BUILT_PRODUCTS_DIR; };
A415CAAF1EEA437900983607 /* AppDelegate.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = AppDelegate.swift; sourceTree = "<group>"; };
A415CAB11EEA437900983607 /* ViewController.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ViewController.swift; sourceTree = "<group>"; };
A415CAB61EEA437900983607 /* Assets.xcassets */ = {isa = PBXFileReference; lastKnownFileType = folder.assetcatalog; path = Assets.xcassets; sourceTree = "<group>"; };
A415CAB91EEA437900983607 /* Base */ = {isa = PBXFileReference; lastKnownFileType = file.storyboard; name = Base; path = Base.lproj/LaunchScreen.storyboard; sourceTree = "<group>"; };
A415CABB1EEA437900983607 /* Info.plist */ = {isa = PBXFileReference; lastKnownFileType = text.plist.xml; path = Info.plist; sourceTree = "<group>"; };
A415CACE1EEA4AF300983607 /* Inceptionv3.mlmodel */ = {isa = PBXFileReference; lastKnownFileType = file.mlmodel; path = Inceptionv3.mlmodel; sourceTree = "<group>"; };
A415CAD11EEA4BA100983607 /* AVCaptureDevice+Extension.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "AVCaptureDevice+Extension.swift"; sourceTree = "<group>"; };
A415CAD21EEA4BA100983607 /* VideoCameraType.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = VideoCameraType.swift; sourceTree = "<group>"; };
A415CAD31EEA4BA100983607 /* VideoCapture.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = VideoCapture.swift; sourceTree = "<group>"; };
A415CAD51EEA4BC900983607 /* Main.storyboard */ = {isa = PBXFileReference; lastKnownFileType = file.storyboard; path = Main.storyboard; sourceTree = "<group>"; };
/* End PBXFileReference section */
/* Begin PBXFrameworksBuildPhase section */
A415CAA91EEA437900983607 /* Frameworks */ = {
isa = PBXFrameworksBuildPhase;
buildActionMask = 2147483647;
files = (
);
runOnlyForDeploymentPostprocessing = 0;
};
/* End PBXFrameworksBuildPhase section */
/* Begin PBXGroup section */
A415CAA31EEA437900983607 = {
isa = PBXGroup;
children = (
A415CAAE1EEA437900983607 /* CoreMLSample */,
A415CAAD1EEA437900983607 /* Products */,
);
sourceTree = "<group>";
};
A415CAAD1EEA437900983607 /* Products */ = {
isa = PBXGroup;
children = (
A415CAAC1EEA437900983607 /* CoreMLSample.app */,
);
name = Products;
sourceTree = "<group>";
};
A415CAAE1EEA437900983607 /* CoreMLSample */ = {
isa = PBXGroup;
children = (
A415CACE1EEA4AF300983607 /* Inceptionv3.mlmodel */,
A415CAD01EEA4BA100983607 /* VideoCapture */,
A415CAAF1EEA437900983607 /* AppDelegate.swift */,
A415CAB11EEA437900983607 /* ViewController.swift */,
A415CAD51EEA4BC900983607 /* Main.storyboard */,
A415CAB61EEA437900983607 /* Assets.xcassets */,
A415CAB81EEA437900983607 /* LaunchScreen.storyboard */,
A415CABB1EEA437900983607 /* Info.plist */,
);
path = CoreMLSample;
sourceTree = "<group>";
};
A415CAD01EEA4BA100983607 /* VideoCapture */ = {
isa = PBXGroup;
children = (
A415CAD11EEA4BA100983607 /* AVCaptureDevice+Extension.swift */,
A415CAD21EEA4BA100983607 /* VideoCameraType.swift */,
A415CAD31EEA4BA100983607 /* VideoCapture.swift */,
);
path = VideoCapture;
sourceTree = "<group>";
};
/* End PBXGroup section */
/* Begin PBXNativeTarget section */
A415CAAB1EEA437900983607 /* CoreMLSample */ = {
isa = PBXNativeTarget;
buildConfigurationList = A415CABE1EEA437900983607 /* Build configuration list for PBXNativeTarget "CoreMLSample" */;
buildPhases = (
A415CAA81EEA437900983607 /* Sources */,
A415CAA91EEA437900983607 /* Frameworks */,
A415CAAA1EEA437900983607 /* Resources */,
);
buildRules = (
);
dependencies = (
);
name = CoreMLSample;
productName = CoreMLSimple;
productReference = A415CAAC1EEA437900983607 /* CoreMLSample.app */;
productType = "com.apple.product-type.application";
};
/* End PBXNativeTarget section */
/* Begin PBXProject section */
A415CAA41EEA437900983607 /* Project object */ = {
isa = PBXProject;
attributes = {
LastSwiftUpdateCheck = 0900;
LastUpgradeCheck = 0900;
ORGANIZATIONNAME = "杨萧玉";
TargetAttributes = {
A415CAAB1EEA437900983607 = {
CreatedOnToolsVersion = 9.0;
LastSwiftMigration = 0900;
};
};
};
buildConfigurationList = A415CAA71EEA437900983607 /* Build configuration list for PBXProject "CoreMLSample" */;
compatibilityVersion = "Xcode 8.0";
developmentRegion = en;
hasScannedForEncodings = 0;
knownRegions = (
en,
Base,
);
mainGroup = A415CAA31EEA437900983607;
productRefGroup = A415CAAD1EEA437900983607 /* Products */;
projectDirPath = "";
projectRoot = "";
targets = (
A415CAAB1EEA437900983607 /* CoreMLSample */,
);
};
/* End PBXProject section */
/* Begin PBXResourcesBuildPhase section */
A415CAAA1EEA437900983607 /* Resources */ = {
isa = PBXResourcesBuildPhase;
buildActionMask = 2147483647;
files = (
A415CAD91EEA4C1800983607 /* Main.storyboard in Resources */,
A415CABA1EEA437900983607 /* LaunchScreen.storyboard in Resources */,
A415CAB71EEA437900983607 /* Assets.xcassets in Resources */,
);
runOnlyForDeploymentPostprocessing = 0;
};
/* End PBXResourcesBuildPhase section */
/* Begin PBXSourcesBuildPhase section */
A415CAA81EEA437900983607 /* Sources */ = {
isa = PBXSourcesBuildPhase;
buildActionMask = 2147483647;
files = (
A415CAD61EEA4C0800983607 /* VideoCapture.swift in Sources */,
A415CAB21EEA437900983607 /* ViewController.swift in Sources */,
A415CACF1EEA4B0300983607 /* Inceptionv3.mlmodel in Sources */,
A415CAD71EEA4C0D00983607 /* VideoCameraType.swift in Sources */,
A415CAB01EEA437900983607 /* AppDelegate.swift in Sources */,
A415CAD81EEA4C1000983607 /* AVCaptureDevice+Extension.swift in Sources */,
);
runOnlyForDeploymentPostprocessing = 0;
};
/* End PBXSourcesBuildPhase section */
/* Begin PBXVariantGroup section */
A415CAB81EEA437900983607 /* LaunchScreen.storyboard */ = {
isa = PBXVariantGroup;
children = (
A415CAB91EEA437900983607 /* Base */,
);
name = LaunchScreen.storyboard;
sourceTree = "<group>";
};
/* End PBXVariantGroup section */
/* Begin XCBuildConfiguration section */
A415CABC1EEA437900983607 /* Debug */ = {
isa = XCBuildConfiguration;
buildSettings = {
ALWAYS_SEARCH_USER_PATHS = NO;
CLANG_ANALYZER_NONNULL = YES;
CLANG_ANALYZER_NUMBER_OBJECT_CONVERSION = YES_AGGRESSIVE;
CLANG_CXX_LANGUAGE_STANDARD = "gnu++14";
CLANG_CXX_LIBRARY = "libc++";
CLANG_ENABLE_MODULES = YES;
CLANG_ENABLE_OBJC_ARC = YES;
CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES;
CLANG_WARN_BOOL_CONVERSION = YES;
CLANG_WARN_COMMA = YES;
CLANG_WARN_CONSTANT_CONVERSION = YES;
CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR;
CLANG_WARN_DOCUMENTATION_COMMENTS = YES;
CLANG_WARN_EMPTY_BODY = YES;
CLANG_WARN_ENUM_CONVERSION = YES;
CLANG_WARN_INFINITE_RECURSION = YES;
CLANG_WARN_INT_CONVERSION = YES;
CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR;
CLANG_WARN_RANGE_LOOP_ANALYSIS = YES;
CLANG_WARN_STRICT_PROTOTYPES = YES;
CLANG_WARN_SUSPICIOUS_MOVE = YES;
CLANG_WARN_UNGUARDED_AVAILABILITY = YES_AGGRESSIVE;
CLANG_WARN_UNREACHABLE_CODE = YES;
CLANG_WARN__DUPLICATE_METHOD_MATCH = YES;
CODE_SIGN_IDENTITY = "iPhone Developer";
COPY_PHASE_STRIP = NO;
DEBUG_INFORMATION_FORMAT = dwarf;
ENABLE_STRICT_OBJC_MSGSEND = YES;
ENABLE_TESTABILITY = YES;
GCC_C_LANGUAGE_STANDARD = gnu11;
GCC_DYNAMIC_NO_PIC = NO;
GCC_NO_COMMON_BLOCKS = YES;
GCC_OPTIMIZATION_LEVEL = 0;
GCC_PREPROCESSOR_DEFINITIONS = (
"DEBUG=1",
"$(inherited)",
);
GCC_WARN_64_TO_32_BIT_CONVERSION = YES;
GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR;
GCC_WARN_UNDECLARED_SELECTOR = YES;
GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE;
GCC_WARN_UNUSED_FUNCTION = YES;
GCC_WARN_UNUSED_VARIABLE = YES;
IPHONEOS_DEPLOYMENT_TARGET = 11.0;
MTL_ENABLE_DEBUG_INFO = YES;
ONLY_ACTIVE_ARCH = YES;
SDKROOT = iphoneos;
SWIFT_ACTIVE_COMPILATION_CONDITIONS = DEBUG;
SWIFT_OPTIMIZATION_LEVEL = "-Onone";
};
name = Debug;
};
A415CABD1EEA437900983607 /* Release */ = {
isa = XCBuildConfiguration;
buildSettings = {
ALWAYS_SEARCH_USER_PATHS = NO;
CLANG_ANALYZER_NONNULL = YES;
CLANG_ANALYZER_NUMBER_OBJECT_CONVERSION = YES_AGGRESSIVE;
CLANG_CXX_LANGUAGE_STANDARD = "gnu++14";
CLANG_CXX_LIBRARY = "libc++";
CLANG_ENABLE_MODULES = YES;
CLANG_ENABLE_OBJC_ARC = YES;
CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES;
CLANG_WARN_BOOL_CONVERSION = YES;
CLANG_WARN_COMMA = YES;
CLANG_WARN_CONSTANT_CONVERSION = YES;
CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR;
CLANG_WARN_DOCUMENTATION_COMMENTS = YES;
CLANG_WARN_EMPTY_BODY = YES;
CLANG_WARN_ENUM_CONVERSION = YES;
CLANG_WARN_INFINITE_RECURSION = YES;
CLANG_WARN_INT_CONVERSION = YES;
CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR;
CLANG_WARN_RANGE_LOOP_ANALYSIS = YES;
CLANG_WARN_STRICT_PROTOTYPES = YES;
CLANG_WARN_SUSPICIOUS_MOVE = YES;
CLANG_WARN_UNGUARDED_AVAILABILITY = YES_AGGRESSIVE;
CLANG_WARN_UNREACHABLE_CODE = YES;
CLANG_WARN__DUPLICATE_METHOD_MATCH = YES;
CODE_SIGN_IDENTITY = "iPhone Developer";
COPY_PHASE_STRIP = NO;
DEBUG_INFORMATION_FORMAT = "dwarf-with-dsym";
ENABLE_NS_ASSERTIONS = NO;
ENABLE_STRICT_OBJC_MSGSEND = YES;
GCC_C_LANGUAGE_STANDARD = gnu11;
GCC_NO_COMMON_BLOCKS = YES;
GCC_WARN_64_TO_32_BIT_CONVERSION = YES;
GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR;
GCC_WARN_UNDECLARED_SELECTOR = YES;
GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE;
GCC_WARN_UNUSED_FUNCTION = YES;
GCC_WARN_UNUSED_VARIABLE = YES;
IPHONEOS_DEPLOYMENT_TARGET = 11.0;
MTL_ENABLE_DEBUG_INFO = NO;
SDKROOT = iphoneos;
SWIFT_OPTIMIZATION_LEVEL = "-Owholemodule";
VALIDATE_PRODUCT = YES;
};
name = Release;
};
A415CABF1EEA437900983607 /* Debug */ = {
isa = XCBuildConfiguration;
buildSettings = {
ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon;
DEVELOPMENT_TEAM = D3RCVUP6VH;
INFOPLIST_FILE = "$(SRCROOT)/CoreMLSample/Info.plist";
IPHONEOS_DEPLOYMENT_TARGET = 11.0;
LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks";
PRODUCT_BUNDLE_IDENTIFIER = com.yulingtianxia.CoreMLSimple;
PRODUCT_NAME = "$(TARGET_NAME)";
SWIFT_SWIFT3_OBJC_INFERENCE = Off;
SWIFT_VERSION = 4.0;
TARGETED_DEVICE_FAMILY = "1,2";
};
name = Debug;
};
A415CAC01EEA437900983607 /* Release */ = {
isa = XCBuildConfiguration;
buildSettings = {
ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon;
DEVELOPMENT_TEAM = D3RCVUP6VH;
INFOPLIST_FILE = "$(SRCROOT)/CoreMLSample/Info.plist";
IPHONEOS_DEPLOYMENT_TARGET = 11.0;
LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks";
PRODUCT_BUNDLE_IDENTIFIER = com.yulingtianxia.CoreMLSimple;
PRODUCT_NAME = "$(TARGET_NAME)";
SWIFT_SWIFT3_OBJC_INFERENCE = Off;
SWIFT_VERSION = 4.0;
TARGETED_DEVICE_FAMILY = "1,2";
};
name = Release;
};
/* End XCBuildConfiguration section */
/* Begin XCConfigurationList section */
A415CAA71EEA437900983607 /* Build configuration list for PBXProject "CoreMLSample" */ = {
isa = XCConfigurationList;
buildConfigurations = (
A415CABC1EEA437900983607 /* Debug */,
A415CABD1EEA437900983607 /* Release */,
);
defaultConfigurationIsVisible = 0;
defaultConfigurationName = Release;
};
A415CABE1EEA437900983607 /* Build configuration list for PBXNativeTarget "CoreMLSample" */ = {
isa = XCConfigurationList;
buildConfigurations = (
A415CABF1EEA437900983607 /* Debug */,
A415CAC01EEA437900983607 /* Release */,
);
defaultConfigurationIsVisible = 0;
defaultConfigurationName = Release;
};
/* End XCConfigurationList section */
};
rootObject = A415CAA41EEA437900983607 /* Project object */;
}
================================================
FILE: CoreMLSample.xcodeproj/project.xcworkspace/contents.xcworkspacedata
================================================
<?xml version="1.0" encoding="UTF-8"?>
<Workspace
version = "1.0">
<FileRef
location = "self:/Users/yangxiaoyu/Code/Core-ML-Simple/CoreMLSample.xcodeproj">
</FileRef>
</Workspace>
================================================
FILE: LICENSE
================================================
MIT License
Copyright (c) 2017 杨萧玉
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
================================================
FILE: README.md
================================================
# Core-ML-Sample
A Demo using Core ML, Vision Framework and Swift 4.

This demo is based on Inception V3 network.
You must run it with Xcode 9 and iOS 11.
Article: http://yulingtianxia.com/blog/2017/06/19/Core-ML-and-Vision-Framework-on-iOS-11/
Thanks to [iOS-10-Sampler](https://github.com/shu223/iOS-10-Sampler).
gitextract_ixdvp4cm/ ├── .gitignore ├── CoreMLSample/ │ ├── AppDelegate.swift │ ├── Assets.xcassets/ │ │ └── AppIcon.appiconset/ │ │ └── Contents.json │ ├── Base.lproj/ │ │ ├── LaunchScreen.storyboard │ │ └── Main.storyboard │ ├── Inceptionv3.mlmodel │ ├── Info.plist │ ├── Main.storyboard │ ├── VideoCapture/ │ │ ├── AVCaptureDevice+Extension.swift │ │ ├── VideoCameraType.swift │ │ └── VideoCapture.swift │ └── ViewController.swift ├── CoreMLSample.xcodeproj/ │ ├── project.pbxproj │ └── project.xcworkspace/ │ └── contents.xcworkspacedata ├── LICENSE └── README.md
Condensed preview — 16 files, each showing path, character count, and a content snippet. Download the .json file or copy for the full structured content (53K chars).
[
{
"path": ".gitignore",
"chars": 1448,
"preview": "# Xcode\n#\n# gitignore contributors: remember to update Global/Xcode.gitignore, Objective-C.gitignore & Swift.gitignore\n\n"
},
{
"path": "CoreMLSample/AppDelegate.swift",
"chars": 2161,
"preview": "//\n// AppDelegate.swift\n// CoreMLSimple\n//\n// Created by 杨萧玉 on 2017/6/9.\n// Copyright © 2017年 杨萧玉. All rights reser"
},
{
"path": "CoreMLSample/Assets.xcassets/AppIcon.appiconset/Contents.json",
"chars": 1635,
"preview": "{\n \"images\" : [\n {\n \"idiom\" : \"iphone\",\n \"size\" : \"20x20\",\n \"scale\" : \"2x\"\n },\n {\n \"idiom\""
},
{
"path": "CoreMLSample/Base.lproj/LaunchScreen.storyboard",
"chars": 1740,
"preview": "<?xml version=\"1.0\" encoding=\"UTF-8\" standalone=\"no\"?>\n<document type=\"com.apple.InterfaceBuilder3.CocoaTouch.Storyboard"
},
{
"path": "CoreMLSample/Base.lproj/Main.storyboard",
"chars": 1695,
"preview": "<?xml version=\"1.0\" encoding=\"UTF-8\" standalone=\"no\"?>\n<document type=\"com.apple.InterfaceBuilder3.CocoaTouch.Storyboard"
},
{
"path": "CoreMLSample/Info.plist",
"chars": 1543,
"preview": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<!DOCTYPE plist PUBLIC \"-//Apple//DTD PLIST 1.0//EN\" \"http://www.apple.com/DTDs/P"
},
{
"path": "CoreMLSample/Main.storyboard",
"chars": 8683,
"preview": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<document type=\"com.apple.InterfaceBuilder3.CocoaTouch.Storyboard.XIB\" version=\"3"
},
{
"path": "CoreMLSample/VideoCapture/AVCaptureDevice+Extension.swift",
"chars": 3064,
"preview": "//\n// AppDelegate.swift\n// CoreMLSimple\n//\n// Created by 杨萧玉 on 2017/6/9.\n// Copyright © 2017年 杨萧玉. All rights reser"
},
{
"path": "CoreMLSample/VideoCapture/VideoCameraType.swift",
"chars": 733,
"preview": "//\n// AppDelegate.swift\n// CoreMLSimple\n//\n// Created by 杨萧玉 on 2017/6/9.\n// Copyright © 2017年 杨萧玉. All rights reser"
},
{
"path": "CoreMLSample/VideoCapture/VideoCapture.swift",
"chars": 4273,
"preview": "//\n// AppDelegate.swift\n// CoreMLSimple\n//\n// Created by 杨萧玉 on 2017/6/9.\n// Copyright © 2017年 杨萧玉. All rights reser"
},
{
"path": "CoreMLSample/ViewController.swift",
"chars": 6491,
"preview": "//\n// ViewController.swift\n// CoreMLSimple\n//\n// Created by 杨萧玉 on 2017/6/9.\n// Copyright © 2017年 杨萧玉. All rights re"
},
{
"path": "CoreMLSample.xcodeproj/project.pbxproj",
"chars": 13919,
"preview": "// !$*UTF8*$!\n{\n\tarchiveVersion = 1;\n\tclasses = {\n\t};\n\tobjectVersion = 48;\n\tobjects = {\n\n/* Begin PBXBuildFile section *"
},
{
"path": "CoreMLSample.xcodeproj/project.xcworkspace/contents.xcworkspacedata",
"chars": 195,
"preview": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<Workspace\n version = \"1.0\">\n <FileRef\n location = \"self:/Users/yangxiao"
},
{
"path": "LICENSE",
"chars": 1060,
"preview": "MIT License\n\nCopyright (c) 2017 杨萧玉\n\nPermission is hereby granted, free of charge, to any person obtaining a copy\nof thi"
},
{
"path": "README.md",
"chars": 336,
"preview": "# Core-ML-Sample\n\nA Demo using Core ML, Vision Framework and Swift 4.\n\n\n\nThis demo is based on Inception "
}
]
// ... and 1 more files (download for full content)
About this extraction
This page contains the full source code of the yulingtianxia/Core-ML-Sample GitHub repository, extracted and formatted as plain text for AI agents and large language models (LLMs). The extraction includes 16 files (90.4 MB), approximately 13.0k tokens. Use this with OpenClaw, Claude, ChatGPT, Cursor, Windsurf, or any other AI tool that accepts text input. You can copy the full output to your clipboard or download it as a .txt file.
Extracted by GitExtract — free GitHub repo to text converter for AI. Built by Nikandr Surkov.