As the title says, I'm trying to access the camera inside a view of an iMessage app extension. This code runs fine in a normal app, but my iMessage extension doesn't display anything. I have also set up the Info.plist.
Thanks.
import UIKit
import Messages
import AVFoundation

class MessagesViewController: MSMessagesAppViewController {

    @IBOutlet weak var cameraView: UIView!

    var session: AVCaptureSession?
    var stillImageOutput: AVCaptureStillImageOutput?
    var videoPreviewLayer: AVCaptureVideoPreviewLayer?
    var captureDevice: AVCaptureDevice?

    override func viewDidLoad() {
        super.viewDidLoad()
        // Do any additional setup after loading the view.
        alignment()
    }

    func alignment() {
        cameraView.bounds.size.width = view.bounds.size.width / 4
        cameraView.bounds.size.height = cameraView.bounds.size.width
        cameraView.layer.cornerRadius = cameraView.bounds.size.width / 2
    }

    override func viewWillAppear(_ animated: Bool) {
        session = AVCaptureSession()
        session!.sessionPreset = AVCaptureSessionPresetPhoto

        // Pick the front camera from the available video devices.
        let videoDevices = AVCaptureDevice.devices(withMediaType: AVMediaTypeVideo)
        for device in videoDevices! {
            let device = device as! AVCaptureDevice
            if device.position == AVCaptureDevicePosition.front {
                captureDevice = device
            }
        }

        // We will make a new AVCaptureDeviceInput and attempt to associate it with our front camera input device.
        // There is a chance that the input device might not be available, so we will set up a try/catch to handle any potential errors we might encounter.
        var error: NSError?
        var input: AVCaptureeDeviceInput!
        do {
            input = try AVCaptureDeviceInput(device: captureDevice)
        } catch let error1 as NSError {
            error = error1
            input = nil
            print(error!.localizedDescription)
        }

        if error == nil && session!.canAddInput(input) {
            session!.addInput(input)

            // The remainder of the session setup will go here...
            stillImageOutput = AVCaptureStillImageOutput()
            stillImageOutput?.outputSettings = [AVVideoCodecKey: AVVideoCodecJPEG]
            if session!.canAddOutput(stillImageOutput) {
                session!.addOutput(stillImageOutput)

                // Configure the live preview here.
                videoPreviewLayer = AVCaptureVideoPreviewLayer(session: session)
                videoPreviewLayer!.videoGravity = AVLayerVideoGravityResizeAspect
                videoPreviewLayer!.connection?.videoOrientation = AVCaptureVideoOrientation.portrait
                cameraView.layer.addSublayer(videoPreviewLayer!)
                session!.startRunning()
            }
        }
    }

    override func viewDidAppear(_ animated: Bool) {
        videoPreviewLayer!.frame = cameraView.bounds
    }

    override func didReceiveMemoryWarning() {
        super.didReceiveMemoryWarning()
        // Dispose of any resources that can be recreated.
    }

    // MARK: - Conversation Handling

    override func willBecomeActive(with conversation: MSConversation) {
        // Called when the extension is about to move from the inactive to active state.
        // This will happen when the extension is about to present UI.
        // Use this method to configure the extension and restore previously stored state.
    }

    override func didResignActive(with conversation: MSConversation) {
        // Called when the extension is about to move from the active to inactive state.
        // This will happen when the user dismisses the extension, changes to a different
        // conversation or quits Messages.
        // Use this method to release shared resources, save user data, invalidate timers,
        // and store enough state information to restore your extension to its current state
        // in case it is terminated later.
    }

    override func didReceive(_ message: MSMessage, conversation: MSConversation) {
        // Called when a message arrives that was generated by another instance of this
        // extension on a remote device.
        // Use this method to trigger UI updates in response to the message.
    }

    override func didStartSending(_ message: MSMessage, conversation: MSConversation) {
        // Called when the user taps the send button.
    }

    override func didCancelSending(_ message: MSMessage, conversation: MSConversation) {
        // Called when the user deletes the message without sending it.
        // Use this to clean up state related to the deleted message.
    }

    override func willTransition(to presentationStyle: MSMessagesAppPresentationStyle) {
        // Called before the extension transitions to a new presentation style.
        // Use this method to prepare for the change in presentation style.
    }

    override func didTransition(to presentationStyle: MSMessagesAppPresentationStyle) {
        // Called after the extension transitions to a new presentation style.
        // Use this method to finalize any behaviors associated with the change in presentation style.
    }
}
Best answer
If you want to use the camera in a view of your iMessage app extension, grant "Camera" access in both the project's Info.plist and the MessageExtension's Info.plist, as shown below:
In the Info.plist:
<key>NSCameraUsageDescription</key>
<string>To take a pic.</string>
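Beyond the usage-description key, it can also help to confirm at runtime that camera access has actually been granted before building the session. The following is only a sketch, not part of the original answer; it uses the same Swift 3 era AVFoundation API as the code below, and checkCameraAuthorization is a hypothetical helper method you could add to the MessagesViewController shown below:

import AVFoundation

// Sketch (assumption, not from the original answer): check or request camera
// authorization before configuring the AVCaptureSession.
func checkCameraAuthorization(_ completion: @escaping (Bool) -> Void) {
    switch AVCaptureDevice.authorizationStatus(forMediaType: AVMediaTypeVideo) {
    case .authorized:
        // Access was granted earlier.
        completion(true)
    case .notDetermined:
        // Triggers the system prompt that uses NSCameraUsageDescription.
        AVCaptureDevice.requestAccess(forMediaType: AVMediaTypeVideo) { granted in
            DispatchQueue.main.async { completion(granted) }
        }
    default:
        // .denied or .restricted: the preview layer will stay blank.
        completion(false)
    }
}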
Once the permission is granted, you can write the following:
import UIKit
import Messages
import AVFoundation

class MessagesViewController: MSMessagesAppViewController {

    @IBOutlet weak var cameraView: UIView!
    @IBOutlet weak var CaptureBtn: UIButton!
    @IBOutlet weak var capturedImage: UIImageView!

    var captureSession: AVCaptureSession?
    var stillImageOutput: AVCaptureStillImageOutput?
    var previewLayer: AVCaptureVideoPreviewLayer?
    var captureDevice: AVCaptureDevice?

    override func viewDidLoad() {
        super.viewDidLoad()
        cameraView.frame = view.bounds
        cameraView.translatesAutoresizingMaskIntoConstraints = true
    }

    override func viewWillAppear(_ animated: Bool) {
        super.viewWillAppear(animated)

        captureSession = AVCaptureSession()
        captureSession!.sessionPreset = AVCaptureSessionPresetPhoto

        // Use the default video capture device (the back camera).
        let backCamera = AVCaptureDevice.defaultDevice(withMediaType: AVMediaTypeVideo)
        var error: NSError?
        var input: AVCaptureDeviceInput!
        do {
            input = try AVCaptureDeviceInput(device: backCamera)
        } catch let error1 as NSError {
            error = error1
            input = nil
        }

        if error == nil && captureSession!.canAddInput(input) {
            captureSession!.addInput(input)

            stillImageOutput = AVCaptureStillImageOutput()
            stillImageOutput!.outputSettings = [AVVideoCodecKey: AVVideoCodecJPEG]
            if captureSession!.canAddOutput(stillImageOutput) {
                captureSession!.addOutput(stillImageOutput)

                // Configure the live preview layer and start the session.
                previewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
                previewLayer!.videoGravity = AVLayerVideoGravityResizeAspect
                previewLayer!.connection?.videoOrientation = AVCaptureVideoOrientation.portrait
                cameraView.layer.addSublayer(previewLayer!)
                captureSession!.startRunning()
            }
        }
    }

    override func viewDidAppear(_ animated: Bool) {
        super.viewDidAppear(animated)
        previewLayer!.frame = cameraView.bounds
    }

    @IBAction func CaptureBtn_Pressed(_ sender: Any) {
        if let videoConnection = stillImageOutput!.connection(withMediaType: AVMediaTypeVideo) {
            videoConnection.videoOrientation = AVCaptureVideoOrientation.portrait
            stillImageOutput?.captureStillImageAsynchronously(from: videoConnection, completionHandler: { (sampleBuffer, error) in
                // Convert the JPEG sample buffer into a UIImage and show it.
                if sampleBuffer != nil,
                    let imageData = AVCaptureStillImageOutput.jpegStillImageNSDataRepresentation(sampleBuffer),
                    let dataProvider = CGDataProvider(data: imageData as CFData),
                    let cgImageRef = CGImage(jpegDataProviderSource: dataProvider, decode: nil, shouldInterpolate: true, intent: CGColorRenderingIntent.defaultIntent) {
                    let image = UIImage(cgImage: cgImageRef, scale: 1.0, orientation: UIImageOrientation.right)
                    self.capturedImage.image = image
                }
            })
        }
    }

    override func didReceiveMemoryWarning() {
        super.didReceiveMemoryWarning()
    }

    // MARK: - Conversation Handling

    override func willBecomeActive(with conversation: MSConversation) {
    }

    override func didResignActive(with conversation: MSConversation) {
    }

    override func didReceive(_ message: MSMessage, conversation: MSConversation) {
    }

    override func didStartSending(_ message: MSMessage, conversation: MSConversation) {
    }

    override func didCancelSending(_ message: MSMessage, conversation: MSConversation) {
    }

    override func willTransition(to presentationStyle: MSMessagesAppPresentationStyle) {
    }

    override func didTransition(to presentationStyle: MSMessagesAppPresentationStyle) {
    }
}
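Since didResignActive(with:) is where the Messages template suggests releasing shared resources, one possible refinement (a sketch, not part of the original answer, assuming the captureSession property defined above and replacing the empty stubs in the class) is to stop the session when the extension resigns active and restart it when it becomes active again:

// Sketch (assumption): pause and resume the capture session as the
// extension moves between the active and inactive states.
override func willBecomeActive(with conversation: MSConversation) {
    if captureSession?.isRunning == false {
        captureSession?.startRunning()
    }
}

override func didResignActive(with conversation: MSConversation) {
    captureSession?.stopRunning()
}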
On the topic of "ios - Cannot access camera from iMessage App", a similar question can be found on Stack Overflow: https://stackoverflow.com/questions/40791180/