Fixes with Xcode 9 (#14)
* Update with Xcode 9

* Fixed AVFoundation API changes

* Fixed NSAttributedString API changes

* Fixed MPSCNN sample for iOS 11 / Swift 4 / Xcode 9

* Fixed more

* More fixes

* Fixed "the descriptor must be configured with MTLStorageModePrivate" error

* Commented out the previewPhotoFormat setting in LivePhotoCaptureSessionManager

* Updated .gitignore

* Fixed build errors

* Fixed build errors

* Disabled "Swift 3 @objc inference"
shu223 authored Sep 20, 2017
1 parent 0538e71 commit 0d3bd7f
Showing 17 changed files with 138 additions and 110 deletions.
2 changes: 2 additions & 0 deletions .gitignore
@@ -2,6 +2,8 @@
#
# gitignore contributors: remember to update Global/Xcode.gitignore, Objective-C.gitignore & Swift.gitignore

iOS-10-Sampler/Resources/network_params/logos

## Build generated
build/
DerivedData/
35 changes: 28 additions & 7 deletions iOS-10-Sampler.xcodeproj/project.pbxproj
@@ -1277,7 +1277,7 @@
isa = PBXProject;
attributes = {
LastSwiftUpdateCheck = 0800;
LastUpgradeCheck = 0810;
LastUpgradeCheck = 0900;
ORGANIZATIONNAME = "Shuichi Tsutsumi";
TargetAttributes = {
8A1CC38B1D7AC22E00562709 = {
@@ -1288,17 +1288,20 @@
8AB430541D7A6B4900A3BD98 = {
CreatedOnToolsVersion = 8.0;
DevelopmentTeam = 9Z86A4AWDE;
LastSwiftMigration = 0900;
ProvisioningStyle = Automatic;
};
8AB430681D7A6B4A00A3BD98 = {
CreatedOnToolsVersion = 8.0;
DevelopmentTeam = 9Z86A4AWDE;
LastSwiftMigration = 0900;
ProvisioningStyle = Automatic;
TestTargetID = 8AB430541D7A6B4900A3BD98;
};
8AB430731D7A6B4A00A3BD98 = {
CreatedOnToolsVersion = 8.0;
DevelopmentTeam = 9Z86A4AWDE;
LastSwiftMigration = 0900;
ProvisioningStyle = Automatic;
TestTargetID = 8AB430541D7A6B4900A3BD98;
};
@@ -1717,15 +1720,21 @@
CLANG_CXX_LIBRARY = "libc++";
CLANG_ENABLE_MODULES = YES;
CLANG_ENABLE_OBJC_ARC = YES;
CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES;
CLANG_WARN_BOOL_CONVERSION = YES;
CLANG_WARN_COMMA = YES;
CLANG_WARN_CONSTANT_CONVERSION = YES;
CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR;
CLANG_WARN_DOCUMENTATION_COMMENTS = YES;
CLANG_WARN_EMPTY_BODY = YES;
CLANG_WARN_ENUM_CONVERSION = YES;
CLANG_WARN_INFINITE_RECURSION = YES;
CLANG_WARN_INT_CONVERSION = YES;
CLANG_WARN_NON_LITERAL_NULL_CONVERSION = YES;
CLANG_WARN_OBJC_LITERAL_CONVERSION = YES;
CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR;
CLANG_WARN_RANGE_LOOP_ANALYSIS = YES;
CLANG_WARN_STRICT_PROTOTYPES = YES;
CLANG_WARN_SUSPICIOUS_MOVE = YES;
CLANG_WARN_SUSPICIOUS_MOVES = YES;
CLANG_WARN_UNREACHABLE_CODE = YES;
@@ -1767,15 +1776,21 @@
CLANG_CXX_LIBRARY = "libc++";
CLANG_ENABLE_MODULES = YES;
CLANG_ENABLE_OBJC_ARC = YES;
CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES;
CLANG_WARN_BOOL_CONVERSION = YES;
CLANG_WARN_COMMA = YES;
CLANG_WARN_CONSTANT_CONVERSION = YES;
CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR;
CLANG_WARN_DOCUMENTATION_COMMENTS = YES;
CLANG_WARN_EMPTY_BODY = YES;
CLANG_WARN_ENUM_CONVERSION = YES;
CLANG_WARN_INFINITE_RECURSION = YES;
CLANG_WARN_INT_CONVERSION = YES;
CLANG_WARN_NON_LITERAL_NULL_CONVERSION = YES;
CLANG_WARN_OBJC_LITERAL_CONVERSION = YES;
CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR;
CLANG_WARN_RANGE_LOOP_ANALYSIS = YES;
CLANG_WARN_STRICT_PROTOTYPES = YES;
CLANG_WARN_SUSPICIOUS_MOVE = YES;
CLANG_WARN_SUSPICIOUS_MOVES = YES;
CLANG_WARN_UNREACHABLE_CODE = YES;
@@ -1811,7 +1826,8 @@
PRODUCT_BUNDLE_IDENTIFIER = "com.shu223.iOS-10-Sampler";
PRODUCT_NAME = "$(TARGET_NAME)";
SWIFT_OBJC_BRIDGING_HEADER = "iOS-10-Sampler/iOS10Sampler-Bridging-Header.h";
SWIFT_VERSION = 3.0;
SWIFT_SWIFT3_OBJC_INFERENCE = Off;
SWIFT_VERSION = 4.0;
};
name = Debug;
};
@@ -1825,7 +1841,8 @@
PRODUCT_BUNDLE_IDENTIFIER = "com.shu223.iOS-10-Sampler";
PRODUCT_NAME = "$(TARGET_NAME)";
SWIFT_OBJC_BRIDGING_HEADER = "iOS-10-Sampler/iOS10Sampler-Bridging-Header.h";
SWIFT_VERSION = 3.0;
SWIFT_SWIFT3_OBJC_INFERENCE = Off;
SWIFT_VERSION = 4.0;
};
name = Release;
};
@@ -1839,7 +1856,8 @@
LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks @loader_path/Frameworks";
PRODUCT_BUNDLE_IDENTIFIER = "com.shu223.iOS-10-SamplerTests";
PRODUCT_NAME = "$(TARGET_NAME)";
SWIFT_VERSION = 3.0;
SWIFT_SWIFT3_OBJC_INFERENCE = On;
SWIFT_VERSION = 4.0;
TEST_HOST = "$(BUILT_PRODUCTS_DIR)/iOS-10-Sampler.app/iOS-10-Sampler";
};
name = Debug;
@@ -1854,7 +1872,8 @@
LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks @loader_path/Frameworks";
PRODUCT_BUNDLE_IDENTIFIER = "com.shu223.iOS-10-SamplerTests";
PRODUCT_NAME = "$(TARGET_NAME)";
SWIFT_VERSION = 3.0;
SWIFT_SWIFT3_OBJC_INFERENCE = On;
SWIFT_VERSION = 4.0;
TEST_HOST = "$(BUILT_PRODUCTS_DIR)/iOS-10-Sampler.app/iOS-10-Sampler";
};
name = Release;
@@ -1868,7 +1887,8 @@
LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks @loader_path/Frameworks";
PRODUCT_BUNDLE_IDENTIFIER = "com.shu223.iOS-10-SamplerUITests";
PRODUCT_NAME = "$(TARGET_NAME)";
SWIFT_VERSION = 3.0;
SWIFT_SWIFT3_OBJC_INFERENCE = On;
SWIFT_VERSION = 4.0;
TEST_TARGET_NAME = "iOS-10-Sampler";
};
name = Debug;
@@ -1882,7 +1902,8 @@
LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks @loader_path/Frameworks";
PRODUCT_BUNDLE_IDENTIFIER = "com.shu223.iOS-10-SamplerUITests";
PRODUCT_NAME = "$(TARGET_NAME)";
SWIFT_VERSION = 3.0;
SWIFT_SWIFT3_OBJC_INFERENCE = On;
SWIFT_VERSION = 4.0;
TEST_TARGET_NAME = "iOS-10-Sampler";
};
name = Release;
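
The build settings above move every target to SWIFT_VERSION = 4.0 and turn off Swift 3 @objc inference for the app target (the test targets keep it On). As a rough sketch of what that setting means at call sites, assuming a hypothetical helper class: with inference off, a member is only exposed to the Objective-C runtime when it is marked @objc explicitly.

    import Foundation

    // Minimal sketch (hypothetical class): with SWIFT_SWIFT3_OBJC_INFERENCE = Off,
    // a method referenced via #selector must carry an explicit @objc annotation.
    class SelectorTarget: NSObject {
        @objc func timerFired(_ timer: Timer) {
            print("tick")
        }
    }

    let timer = Timer.scheduledTimer(timeInterval: 1.0,
                                     target: SelectorTarget(),
                                     selector: #selector(SelectorTarget.timerFired(_:)),
                                     userInfo: nil,
                                     repeats: true)
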
@@ -1,6 +1,6 @@
<?xml version="1.0" encoding="UTF-8"?>
<Scheme
LastUpgradeVersion = "0810"
LastUpgradeVersion = "0900"
version = "1.3">
<BuildAction
parallelizeBuildables = "YES"
6 changes: 3 additions & 3 deletions iOS-10-Sampler/Samples/AttributedSpeechViewController.swift
@@ -24,9 +24,9 @@ class AttributedSpeechViewController: UIViewController {
attributedStr = NSMutableAttributedString(string: baseStr)
let rangeAll = NSMakeRange(0, baseStr.characters.count)
let rangeBold = NSString(string: baseStr).range(of: "iOS")
attributedStr.addAttributes([NSFontAttributeName: UIFont.systemFont(ofSize: 14)], range: rangeAll)
attributedStr.addAttributes([NSForegroundColorAttributeName: UIColor.black], range: rangeAll)
attributedStr.addAttributes([NSFontAttributeName: UIFont.boldSystemFont(ofSize: 20)], range: rangeBold)
attributedStr.addAttributes([NSAttributedStringKey.font: UIFont.systemFont(ofSize: 14)], range: rangeAll)
attributedStr.addAttributes([NSAttributedStringKey.foregroundColor: UIColor.black], range: rangeAll)
attributedStr.addAttributes([NSAttributedStringKey.font: UIFont.boldSystemFont(ofSize: 20)], range: rangeBold)

updateUtterance(attributed: false)
}
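
For reference, these changes adopt the Swift 4 attributed-string API: the string constants (NSFontAttributeName and friends) are replaced by NSAttributedStringKey values. A minimal standalone sketch, with placeholder text and sizes:

    import UIKit

    // Swift 4: attribute dictionaries are keyed by NSAttributedStringKey instead of String.
    let text = NSMutableAttributedString(string: "iOS 10 Sampler")
    let wholeRange = NSRange(location: 0, length: text.length)
    text.addAttributes([NSAttributedStringKey.font: UIFont.systemFont(ofSize: 14),
                        NSAttributedStringKey.foregroundColor: UIColor.black],
                       range: wholeRange)
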
6 changes: 5 additions & 1 deletion iOS-10-Sampler/Samples/Inception3Net.swift
@@ -1063,7 +1063,11 @@ class Inception3Net{
// In this sample code, the aggregate benefit of the use of MPSTemporaryImages
// is to reduce the area of memory allocated to 1/4 and save about 3 ms of CPU
// time.
MPSTemporaryImage.prefetchStorage(with: commandBuffer, imageDescriptorList: [sid, inid, m0id, m1id, m2id, m3id, m4id, m5id, m6id, m7id, m8id, m9id, m10id])
let descriptors = [sid, inid, m0id, m1id, m2id, m3id, m4id, m5id, m6id, m7id, m8id, m9id, m10id]
for descriptor in descriptors {
descriptor.storageMode = .private
}
MPSTemporaryImage.prefetchStorage(with: commandBuffer, imageDescriptorList: descriptors)

// we use preImage to hold preprocesing intermediate results
preImage = MPSTemporaryImage(commandBuffer: commandBuffer, imageDescriptor: sid)
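
This loop is the fix for the Metal assertion quoted in the commit message ("the descriptor must be configured with MTLStorageModePrivate"): under the iOS 11 SDK, every descriptor handed to prefetchStorage must use private storage. A condensed sketch of the pattern:

    import MetalPerformanceShaders

    // Sketch: mark each prefetched MPSImageDescriptor as .private before calling
    // prefetchStorage(with:imageDescriptorList:); otherwise Metal asserts on iOS 11.
    func prefetch(_ descriptors: [MPSImageDescriptor], on commandBuffer: MTLCommandBuffer) {
        for descriptor in descriptors {
            descriptor.storageMode = .private
        }
        MPSTemporaryImage.prefetchStorage(with: commandBuffer, imageDescriptorList: descriptors)
    }
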
10 changes: 5 additions & 5 deletions iOS-10-Sampler/Samples/LivePhotoCaptureDelegate.swift
@@ -63,13 +63,13 @@ class LivePhotoCaptureDelegate: NSObject, AVCapturePhotoCaptureDelegate {
// =========================================================================
// MARK: - AVCapturePhotoCaptureDelegate

func capture(_ captureOutput: AVCapturePhotoOutput, willBeginCaptureForResolvedSettings resolvedSettings: AVCaptureResolvedPhotoSettings) {
func photoOutput(_ captureOutput: AVCapturePhotoOutput, willBeginCaptureFor resolvedSettings: AVCaptureResolvedPhotoSettings) {
if resolvedSettings.livePhotoMovieDimensions.width > 0 && resolvedSettings.livePhotoMovieDimensions.height > 0 {
capturingLivePhoto(true)
}
}

func capture(_ captureOutput: AVCapturePhotoOutput, didFinishProcessingPhotoSampleBuffer photoSampleBuffer: CMSampleBuffer?, previewPhotoSampleBuffer: CMSampleBuffer?, resolvedSettings: AVCaptureResolvedPhotoSettings, bracketSettings: AVCaptureBracketedStillImageSettings?, error: Error?) {
func photoOutput(_ captureOutput: AVCapturePhotoOutput, didFinishProcessingPhoto photoSampleBuffer: CMSampleBuffer?, previewPhoto previewPhotoSampleBuffer: CMSampleBuffer?, resolvedSettings: AVCaptureResolvedPhotoSettings, bracketSettings: AVCaptureBracketedStillImageSettings?, error: Error?) {
if let photoSampleBuffer = photoSampleBuffer {
photoData = AVCapturePhotoOutput.jpegPhotoDataRepresentation(forJPEGSampleBuffer: photoSampleBuffer, previewPhotoSampleBuffer: previewPhotoSampleBuffer)
}
@@ -78,11 +78,11 @@ class LivePhotoCaptureDelegate: NSObject, AVCapturePhotoCaptureDelegate {
}
}

func capture(_ captureOutput: AVCapturePhotoOutput, didFinishRecordingLivePhotoMovieForEventualFileAt outputFileURL: URL, resolvedSettings: AVCaptureResolvedPhotoSettings) {
func photoOutput(_ captureOutput: AVCapturePhotoOutput, didFinishRecordingLivePhotoMovieForEventualFileAt outputFileURL: URL, resolvedSettings: AVCaptureResolvedPhotoSettings) {
capturingLivePhoto(false)
}

func capture(_ captureOutput: AVCapturePhotoOutput, didFinishProcessingLivePhotoToMovieFileAt outputFileURL: URL, duration: CMTime, photoDisplay photoDisplayTime: CMTime, resolvedSettings: AVCaptureResolvedPhotoSettings, error: Error?) {
func photoOutput(_ captureOutput: AVCapturePhotoOutput, didFinishProcessingLivePhotoToMovieFileAt outputFileURL: URL, duration: CMTime, photoDisplayTime: CMTime, resolvedSettings: AVCaptureResolvedPhotoSettings, error: Error?) {
if let error = error {
print("Error processing live photo companion movie: \(error)")
return
@@ -91,7 +91,7 @@ class LivePhotoCaptureDelegate: NSObject, AVCapturePhotoCaptureDelegate {
livePhotoCompanionMovieURL = outputFileURL
}

func capture(_ captureOutput: AVCapturePhotoOutput, didFinishCaptureForResolvedSettings resolvedSettings: AVCaptureResolvedPhotoSettings, error: Error?) {
func photoOutput(_ captureOutput: AVCapturePhotoOutput, didFinishCaptureFor resolvedSettings: AVCaptureResolvedPhotoSettings, error: Error?) {
if let error = error {
print("Error capturing photo: \(error)")
didFinish()
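
These delegate methods only change their Swift names: under the iOS 11 SDK the AVCapturePhotoCaptureDelegate callbacks are spelled photoOutput(_:...) rather than capture(_:...). A minimal sketch of a conforming class (hypothetical name):

    import AVFoundation

    // Sketch: Swift 4 / iOS 11 naming of an AVCapturePhotoCaptureDelegate callback.
    class MinimalPhotoDelegate: NSObject, AVCapturePhotoCaptureDelegate {
        func photoOutput(_ output: AVCapturePhotoOutput,
                         didFinishCaptureFor resolvedSettings: AVCaptureResolvedPhotoSettings,
                         error: Error?) {
            if let error = error {
                print("Capture finished with error: \(error)")
            }
        }
    }
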
24 changes: 12 additions & 12 deletions iOS-10-Sampler/Samples/LivePhotoCaptureSessionManager.swift
@@ -41,14 +41,14 @@ class LivePhotoCaptureSessionManager: NSObject {

session.beginConfiguration()

session.sessionPreset = AVCaptureSessionPresetPhoto
session.sessionPreset = .photo

// Add video input.
do {
let videoDevice = AVCaptureDevice.defaultDevice(
withDeviceType: AVCaptureDeviceType.builtInWideAngleCamera,
mediaType: AVMediaTypeVideo,
position: .back)
guard let videoDevice = AVCaptureDevice.default(
.builtInWideAngleCamera,
for: .video,
position: .back) else {fatalError()}
let videoDeviceInput = try AVCaptureDeviceInput(device: videoDevice)

if session.canAddInput(videoDeviceInput) {
@@ -71,7 +71,7 @@

// Add audio input.
do {
let audioDevice = AVCaptureDevice.defaultDevice(withMediaType: AVMediaTypeAudio)
guard let audioDevice = AVCaptureDevice.default(for: .audio) else {fatalError()}
let audioDeviceInput = try AVCaptureDeviceInput(device: audioDevice)

if session.canAddInput(audioDeviceInput) {
@@ -112,7 +112,7 @@
// MARK: - Public

func authorize() {
switch AVCaptureDevice.authorizationStatus(forMediaType: AVMediaTypeVideo) {
switch AVCaptureDevice.authorizationStatus(for: .video) {
case .authorized:
break

@@ -126,7 +126,7 @@
create an AVCaptureDeviceInput for audio during session setup.
*/
sessionQueue.suspend()
AVCaptureDevice.requestAccess(forMediaType: AVMediaTypeVideo, completionHandler: { [unowned self] granted in
AVCaptureDevice.requestAccess(for: .video, completionHandler: { [unowned self] granted in
if !granted {
self.setupResult = .notAuthorized
}
@@ -182,17 +182,17 @@

sessionQueue.async {
// Update the photo output's connection to match the video orientation of the video preview layer.
if let photoOutputConnection = self.photoOutput.connection(withMediaType: AVMediaTypeVideo) {
if let photoOutputConnection = self.photoOutput.connection(with: .video) {
photoOutputConnection.videoOrientation = videoOrientation
}

// Capture a JPEG photo with flash set to auto and high resolution photo enabled.
let photoSettings = AVCapturePhotoSettings()
photoSettings.flashMode = .auto
photoSettings.isHighResolutionPhotoEnabled = true
if photoSettings.availablePreviewPhotoPixelFormatTypes.count > 0 {
photoSettings.previewPhotoFormat = [kCVPixelBufferPixelFormatTypeKey as String : photoSettings.availablePreviewPhotoPixelFormatTypes.first!]
}
// if photoSettings.availablePreviewPhotoPixelFormatTypes.count > 0 {
// photoSettings.previewPhotoFormat = [kCVPixelBufferPixelFormatTypeKey as String : photoSettings.availablePreviewPhotoPixelFormatTypes.first!]
// }
if self.photoOutput.isLivePhotoCaptureSupported { // Live Photo capture is not supported in movie mode.
let livePhotoMovieFileName = NSUUID().uuidString
let livePhotoMovieFilePath = (NSTemporaryDirectory() as NSString).appendingPathComponent((livePhotoMovieFileName as NSString).appendingPathExtension("mov")!)
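
The session setup above also uses the Swift 4 AVFoundation names: AVCaptureDevice.default(_:for:position:) returns an optional, and the old string constants (AVMediaTypeVideo, AVCaptureSessionPresetPhoto) become strongly typed values (.video, .photo). A small sketch of the camera lookup, assuming a back wide-angle camera:

    import AVFoundation

    // Sketch: the Swift 4 device lookup returns an optional, so unwrap it before
    // building the capture input instead of assuming a device exists.
    func makeBackCameraInput() throws -> AVCaptureDeviceInput? {
        guard let camera = AVCaptureDevice.default(.builtInWideAngleCamera,
                                                   for: .video,
                                                   position: .back) else {
            return nil
        }
        return try AVCaptureDeviceInput(device: camera)
    }
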
4 changes: 2 additions & 2 deletions iOS-10-Sampler/Samples/LivePhotoCaptureViewController.swift
@@ -96,8 +96,8 @@ class LivePhotoCaptureViewController: UIViewController {
// MARK: - Actions

@IBAction private func capturePhoto(_ photoButton: UIButton) {
let videoPreviewLayerOrientation = previewView.videoPreviewLayer.connection.videoOrientation
LivePhotoCaptureSessionManager.sharedManager.capture(videoOrientation: videoPreviewLayerOrientation) { (inProgressLivePhotoCapturesCount) in
let videoPreviewLayerOrientation = previewView.videoPreviewLayer.connection?.videoOrientation
LivePhotoCaptureSessionManager.sharedManager.capture(videoOrientation: videoPreviewLayerOrientation!) { (inProgressLivePhotoCapturesCount) in
DispatchQueue.main.async { [unowned self] in
if inProgressLivePhotoCapturesCount > 0 {
self.capturingLivePhotoLabel.isHidden = false
2 changes: 1 addition & 1 deletion iOS-10-Sampler/Samples/Looper.swift
@@ -40,7 +40,7 @@ class Looper: NSObject {

// Getting the natural size of the video
// http://stackoverflow.com/questions/14466842/ios-6-avplayeritem-presentationsize-returning-zero-naturalsize-method-deprec
let videoTracks = playerItem.asset.tracks(withMediaType: AVMediaTypeVideo)
let videoTracks = playerItem.asset.tracks(withMediaType: .video)
guard let videoSize = videoTracks.first?.naturalSize else {fatalError()}

parentLayer.addSublayer(playerLayer)
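
Same rename here: tracks(withMediaType:) now takes the AVMediaType value .video instead of the AVMediaTypeVideo string. A one-function sketch:

    import AVFoundation

    // Sketch: fetch the natural size of the first video track using the Swift 4 media-type value.
    func naturalVideoSize(of item: AVPlayerItem) -> CGSize? {
        return item.asset.tracks(withMediaType: .video).first?.naturalSize
    }
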
2 changes: 1 addition & 1 deletion iOS-10-Sampler/Samples/MNISTDeepCNN.swift
@@ -116,7 +116,7 @@ class MNISTDeepCNN {
// so the user can decide the appropriate time to release this
autoreleasepool{
// Get command buffer to use in MetalPerformanceShaders.
let commandBuffer = commandQueue.makeCommandBuffer()
guard let commandBuffer = commandQueue.makeCommandBuffer() else {return}

// output will be stored in this image
let finalLayer = MPSImage(device: commandBuffer.device, imageDescriptor: did)
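
The guard is needed because MTLCommandQueue.makeCommandBuffer() is imported as returning an optional under the iOS 11 SDK. A minimal sketch of the same pattern:

    import Metal

    // Sketch: makeCommandBuffer() can return nil, so bail out instead of force-unwrapping.
    func encodeWork(on queue: MTLCommandQueue) {
        guard let commandBuffer = queue.makeCommandBuffer() else { return }
        // ... encode Metal / MPS kernels onto commandBuffer here ...
        commandBuffer.commit()
    }
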
37 changes: 26 additions & 11 deletions iOS-10-Sampler/Samples/MetalCNNBasicApple/SlimMPSCNN.swift
@@ -21,7 +21,7 @@ class SlimMPSCNNConvolution: MPSCNNConvolution{
/**
A property to keep info from init time whether we will pad input image or not for use during encode call
*/
private var padding = true
internal var isPadding = true

/**
Initializes a fully connected kernel.
@@ -34,7 +34,7 @@
- neuronFilter: A neuronFilter to add at the end as activation, default is nil
- device: The MTLDevice on which this SlimMPSCNNConvolution filter will be used
- kernelParamsBinaryName: name of the layer to fetch kernelParameters by adding a prefix "weights_" or "bias_"
- padding: Bool value whether to use padding or not
- isPadding: Bool value whether to use padding or not
- strideXY: Stride of the filter
- destinationFeatureChannelOffset: FeatureChannel no. in the destination MPSImage to start writing from, helps with concat operations
- groupNum: if grouping is used, default value is 1 meaning no groups
@@ -93,7 +93,7 @@
self.destinationFeatureChannelOffset = Int(destinationFeatureChannelOffset)

// set padding for calculation of offset during encode call
padding = willPad
isPadding = willPad

// unmap files at initialization of MPSCNNConvolution, the weights are copied and packed internally we no longer require these
assert(munmap(hdrW, Int(sizeWeights)) == 0, "munmap failed with errno = \(errno)")
@@ -104,6 +104,10 @@
close(fd_b)
}

required init?(coder aDecoder: NSCoder) {
fatalError("init(coder:) has not been implemented")
}

/**
Encode a MPSCNNKernel into a command Buffer. The operation shall proceed out-of-place.

@@ -118,16 +122,23 @@
*/
override func encode(commandBuffer: MTLCommandBuffer, sourceImage: MPSImage, destinationImage: MPSImage) {
// select offset according to padding being used or not
if padding {
let pad_along_height = ((destinationImage.height - 1) * strideInPixelsY + kernelHeight - sourceImage.height)
let pad_along_width = ((destinationImage.width - 1) * strideInPixelsX + kernelWidth - sourceImage.width)
let pad_top = Int(pad_along_height / 2)
let pad_left = Int(pad_along_width / 2)

self.offset = MPSOffset(x: ((Int(kernelWidth)/2) - pad_left), y: (Int(kernelHeight/2) - pad_top), z: 0)
if isPadding {
if #available(iOS 11.0, *) {
let pad_along_height = ((destinationImage.height - 1) * strideInPixelsY + kernelHeight - sourceImage.height)
let pad_along_width = ((destinationImage.width - 1) * strideInPixelsX + kernelWidth - sourceImage.width)
let pad_top = Int(pad_along_height / 2)
let pad_left = Int(pad_along_width / 2)
self.offset = MPSOffset(x: ((Int(kernelWidth)/2) - pad_left), y: (Int(kernelHeight/2) - pad_top), z: 0)
} else {
// Fallback on earlier versions
}
}
else{
self.offset = MPSOffset(x: Int(kernelWidth)/2, y: Int(kernelHeight)/2, z: 0)
if #available(iOS 11.0, *) {
self.offset = MPSOffset(x: Int(kernelWidth)/2, y: Int(kernelHeight)/2, z: 0)
} else {
// Fallback on earlier versions
}
}

super.encode(commandBuffer: commandBuffer, sourceImage: sourceImage, destinationImage: destinationImage)
@@ -212,4 +223,8 @@ class SlimMPSCNNFullyConnected: MPSCNNFullyConnected{
close(fd_w)
close(fd_b)
}

required init?(coder aDecoder: NSCoder) {
fatalError("init(coder:) has not been implemented")
}
}
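
The required init?(coder:) stubs added to both Slim subclasses follow from the iOS 11 SDK, where MPSKernel (the ancestor of MPSCNNConvolution and MPSCNNFullyConnected) adopts NSSecureCoding: because each subclass declares its own initializer, Swift also requires the coder initializer, even if it only traps. A bare-bones sketch with a hypothetical subclass:

    import MetalPerformanceShaders

    // Sketch: restate the required NSCoding initializer; these kernels are never
    // unarchived in the sample, so trapping is acceptable here.
    class MinimalConvolution: MPSCNNConvolution {
        required init?(coder aDecoder: NSCoder) {
            fatalError("init(coder:) has not been implemented")
        }
    }
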