I cannot get the device to take an image with the rear-facing camera when the proximity sensor is triggered. I don't want the camera preview to be shown; I just want the device to take a photo and display it in an imageView. I have the proximity sensor working, and I call imagePicker.takePicture() when the proximity sensor is triggered, but it doesn't seem to work. Which method/function can I use to take a picture programmatically, without any user input?
Here is my code so far:
class ViewController: UIViewController, UINavigationControllerDelegate, UIImagePickerControllerDelegate {

    @IBOutlet var imageView: UIImageView!
    var imagePicker: UIImagePickerController!

    // *The function in question*
    func proximityChanged(notification: NSNotification) {
        let device = notification.object as? UIDevice
        if device?.proximityState == true {
            print("\(device) detected!")
            imagePicker.takePicture() // does not capture anything
        }
    }
}
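For reference, proximityChanged(_:) only fires if proximity monitoring is enabled and the notification observer is registered. A minimal sketch of that wiring follows; since the registration code is not shown in the question, placing it in viewDidLoad is an assumption.

    override func viewDidLoad() {
        super.viewDidLoad()
        // Assumed wiring: enable proximity monitoring and observe state changes
        // so that proximityChanged(_:) above is called.
        UIDevice.currentDevice().proximityMonitoringEnabled = true
        NSNotificationCenter.defaultCenter().addObserver(self,
            selector: #selector(proximityChanged),
            name: UIDeviceProximityStateDidChangeNotification,
            object: nil)
    }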
Solution
If you are having trouble capturing a photo with UIImagePickerController, I suggest using AVFoundation.
Below is a working example in which the photo capture is triggered by the proximity sensor.
You can add a live preview if you need one (see the short sketch after the example).
import UIKit
import AVFoundation

final class CaptureViewController: UIViewController {

    @IBOutlet weak var imageView: UIImageView!

    private static let captureSessionPreset = AVCaptureSessionPresetPhoto
    private var captureSession: AVCaptureSession!
    private var photoOutput: AVCaptureStillImageOutput!
    private var initialized = false

    override func viewDidLoad() {
        super.viewDidLoad()
        initialized = setupCaptureSession()
    }

    override func viewWillAppear(animated: Bool) {
        super.viewWillAppear(animated)
        if initialized {
            captureSession.startRunning()
            UIDevice.currentDevice().proximityMonitoringEnabled = true
            NSNotificationCenter.defaultCenter().addObserver(self,
                selector: #selector(proximityStateDidChange),
                name: UIDeviceProximityStateDidChangeNotification,
                object: nil)
        }
    }
    override func viewDidDisappear(animated: Bool) {
        super.viewDidDisappear(animated)
        if initialized {
            NSNotificationCenter.defaultCenter().removeObserver(self,
                name: UIDeviceProximityStateDidChangeNotification,
                object: nil)
            UIDevice.currentDevice().proximityMonitoringEnabled = false
            captureSession.stopRunning()
        }
    }
    // Capture a photo whenever the proximity sensor reports a nearby object.
    dynamic func proximityStateDidChange(notification: NSNotification) {
        if UIDevice.currentDevice().proximityState {
            captureImage()
        }
    }
    // MARK: - Capture Image

    private func captureImage() {
        if let c = findConnection() {
            photoOutput.captureStillImageAsynchronouslyFromConnection(c) { sampleBuffer, error in
                if let jpeg = AVCaptureStillImageOutput.jpegStillImageNSDataRepresentation(sampleBuffer),
                   let image = UIImage(data: jpeg) {
                    dispatch_async(dispatch_get_main_queue()) { [weak self] in
                        self?.imageView.image = image
                    }
                }
            }
        }
    }
    // Returns the output connection whose input port carries video.
    private func findConnection() -> AVCaptureConnection? {
        for c in photoOutput.connections {
            let c = c as? AVCaptureConnection
            for p in c?.inputPorts ?? [] {
                if p.mediaType == AVMediaTypeVideo {
                    return c
                }
            }
        }
        return nil
    }
    // MARK: - Setup Capture Session

    private func setupCaptureSession() -> Bool {
        captureSession = AVCaptureSession()
        if captureSession.canSetSessionPreset(CaptureViewController.captureSessionPreset) {
            captureSession.sessionPreset = CaptureViewController.captureSessionPreset
            if setupCaptureSessionInput() && setupCaptureSessionOutput() {
                return true
            }
        }
        return false
    }
    // The default video capture device is the back camera.
    private func setupCaptureSessionInput() -> Bool {
        if let captureDevice = AVCaptureDevice.defaultDeviceWithMediaType(AVMediaTypeVideo),
           let captureDeviceInput = try? AVCaptureDeviceInput.init(device: captureDevice) {
            if captureSession.canAddInput(captureDeviceInput) {
                captureSession.addInput(captureDeviceInput)
                return true
            }
        }
        return false
    }
    private func setupCaptureSessionOutput() -> Bool {
        photoOutput = AVCaptureStillImageOutput()
        photoOutput.outputSettings = [AVVideoCodecKey: AVVideoCodecJPEG]
        if captureSession.canAddOutput(photoOutput) {
            captureSession.addOutput(photoOutput)
            return true
        }
        return false
    }
}
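As mentioned above, a live preview is optional. If you do want one, a minimal sketch in the same style could look like the following; the previewLayer property and the setupPreviewLayer() helper are illustrative names, not part of the original answer, and setupPreviewLayer() would be called from viewDidLoad once setupCaptureSession() has succeeded.

    // Illustrative addition to CaptureViewController, not part of the original answer.
    private var previewLayer: AVCaptureVideoPreviewLayer!

    private func setupPreviewLayer() {
        // Render the running capture session's frames behind the other views.
        previewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
        previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill
        view.layer.insertSublayer(previewLayer, atIndex: 0)
    }

    override func viewDidLayoutSubviews() {
        super.viewDidLayoutSubviews()
        // Keep the preview sized to the view, e.g. after rotation.
        previewLayer?.frame = view.bounds
    }

The layer is inserted at index 0 so it sits behind the imageView, which means the captured photo remains visible once it is set.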