The didOutputSampleBuffer function is never called in my code, and I don't know why this happens. Here is the code:
import UIKit
import AVFoundation
import Accelerate

class ViewController: UIViewController {
    var captureSession: AVCaptureSession?
    var dataOutput: AVCaptureVideoDataOutput?
    var customPreviewLayer: AVCaptureVideoPreviewLayer?

    @IBOutlet weak var camView: UIView!

    override func viewWillAppear(animated: Bool) {
        super.viewDidAppear(animated)
        captureSession?.startRunning()
        //setupCameraSession()
    }

    override func viewDidLoad() {
        super.viewDidLoad()
        // Do any additional setup after loading the view, typically from a nib.
        //captureSession?.startRunning()
        setupCameraSession()
    }

    override func didReceiveMemoryWarning() {
        super.didReceiveMemoryWarning()
        // Dispose of any resources that can be recreated.
    }

    func setupCameraSession() {
        // Session
        self.captureSession = AVCaptureSession()
        captureSession!.sessionPreset = AVCaptureSessionPreset1920x1080

        // Capture device
        let inputDevice: AVCaptureDevice = AVCaptureDevice.defaultDeviceWithMediaType(AVMediaTypeVideo)
        var deviceInput = AVCaptureDeviceInput()
        do {
            deviceInput = try AVCaptureDeviceInput(device: inputDevice)
        } catch let error as NSError {
            print(error)
        }
        if captureSession!.canAddInput(deviceInput) {
            captureSession!.addInput(deviceInput)
        }

        // Preview
        self.customPreviewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
        self.customPreviewLayer!.frame = camView.bounds
        self.customPreviewLayer?.videoGravity = AVLayerVideoGravityResizeAspect
        self.customPreviewLayer?.connection.videoOrientation = AVCaptureVideoOrientation.Portrait
        camView.layer.addSublayer(self.customPreviewLayer!)
        print("Cam layer added")

        self.dataOutput = AVCaptureVideoDataOutput()
        self.dataOutput!.videoSettings = [
            String(kCVPixelBufferPixelFormatTypeKey): Int(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange)
        ]
        dataOutput!.alwaysDiscardsLateVideoFrames = true
        if captureSession!.canAddOutput(dataOutput) {
            captureSession!.addOutput(dataOutput)
        }
        captureSession!.commitConfiguration()

        let queue: dispatch_queue_t = dispatch_queue_create("VideoQueue", DISPATCH_QUEUE_SERIAL)
        let delegate = VideoDelegate()
        dataOutput!.setSampleBufferDelegate(delegate, queue: queue)
    }

    func captureOutput(captureOutput: AVCaptureOutput, didOutputSampleBuffer sampleBuffer: CMSampleBufferRef, fromConnection connection: AVCaptureConnection) {
        let imageBuffer: CVImageBufferRef = CMSampleBufferGetImageBuffer(sampleBuffer)!
        CVPixelBufferLockBaseAddress(imageBuffer, 0)
        // For iOS the luma is contained in the full plane (8-bit)
        let width: size_t = CVPixelBufferGetWidthOfPlane(imageBuffer, 0)
        let height: size_t = CVPixelBufferGetHeightOfPlane(imageBuffer, 0)
        let bytesPerRow: size_t = CVPixelBufferGetBytesPerRowOfPlane(imageBuffer, 0)
        let lumaBuffer: UnsafeMutablePointer = CVPixelBufferGetBaseAddressOfPlane(imageBuffer, 0)
        let grayColorSpace: CGColorSpaceRef = CGColorSpaceCreateDeviceGray()!
        let context: CGContextRef = CGBitmapContextCreate(lumaBuffer, width, height, 8, bytesPerRow, grayColorSpace, CGImageAlphaInfo.NoneSkipFirst.rawValue)!
        let dstImageFilter: CGImageRef = CGBitmapContextCreateImage(context)!
        dispatch_sync(dispatch_get_main_queue(), { () -> Void in
            self.customPreviewLayer!.contents = dstImageFilter as AnyObject
        })
    }
}
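One thing I'm not sure about is that the delegate is only a local constant inside setupCameraSession(). As far as I can tell, setSampleBufferDelegate(_:queue:) does not keep a strong reference to its delegate, so a variant that stores the delegate in a property would look roughly like the sketch below (SketchViewController, setupOutput() and the videoDelegate property name are placeholders I made up for this sketch, not code from my project):

import UIKit
import AVFoundation

// Sketch (untested): keep the sample-buffer delegate in a stored property so it
// is not deallocated as soon as the setup method returns.
class SketchViewController: UIViewController {
    let captureSession = AVCaptureSession()
    let dataOutput = AVCaptureVideoDataOutput()
    let videoDelegate = VideoDelegate()   // strong reference lives as long as the controller

    func setupOutput() {
        let queue = dispatch_queue_create("VideoQueue", DISPATCH_QUEUE_SERIAL)
        // The output does not retain the delegate, so the object passed here
        // has to stay alive somewhere else (here: the stored property above).
        dataOutput.setSampleBufferDelegate(videoDelegate, queue: queue)
        if captureSession.canAddOutput(dataOutput) {
            captureSession.addOutput(dataOutput)
        }
    }
}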
Here is my VideoDelegate code:
import Foundation
import AVFoundation
import UIKit

// Video Delegate
class VideoDelegate: NSObject, AVCaptureVideoDataOutputSampleBufferDelegate {

    func captureOutput(captureOutput: AVCaptureOutput!, didOutputSampleBuffer sampleBuffer: CMSampleBuffer!, fromConnection connection: AVCaptureConnection!) {
        print("hihi")
    }

    func captureOutput(captureOutput: AVCaptureOutput!, didDropSampleBuffer sampleBuffer: CMSampleBuffer!, fromConnection connection: AVCaptureConnection!) {
        print("LOL")
    }
}
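In case it helps narrow things down, this is the kind of sanity check I can add in viewWillAppear(_:) right after startRunning() (just a debugging sketch, not part of my current code):

// Sketch: quick checks after startRunning(), to see whether the problem is the
// session, the video connection, or the delegate reference.
print("session running: \(captureSession?.running)")               // false would mean the session never started
print("delegate still set: \(dataOutput?.sampleBufferDelegate)")    // nil would mean the delegate object was released
print("video connections: \(dataOutput?.connections.count)")        // 0 would mean the output has no active connection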
Why is my delegate not being called, and how can I fix it? I have already checked similar Stack Overflow questions, but I couldn't find a solution to this problem. Please help.