Bonjour, j'ai suivi un cours de Jared Davidson pour créer une vue de caméra personnalisée et enregistrer des images avec AVFoundation. https://www.youtube.com/watch?v=w0O3ZGUS3pk
Cependant, j'aimerais enregistrer et sauvegarder des vidéos plutôt que des images. Quelqu'un peut-il m'aider ici ? Je suis sûr que c'est simple, mais la documentation d'Apple est écrite en Objective-C et je ne peux pas la déchiffrer.
Ceci est mon code. Merci.
import UIKit
import AVFoundation
// Shows a live front-camera preview and captures still JPEG photos.
// NOTE(review): uses Swift 2-era APIs (AVCaptureStillImageOutput,
// devicesWithMediaType) that are deprecated since iOS 10; modern code should
// use AVCapturePhotoOutput and AVCaptureDevice.DiscoverySession.
class ViewController: UIViewController {
// Capture pipeline: the session ties the camera input to the still-image output.
var captureSession = AVCaptureSession()
// Output that produces still JPEG frames on demand.
var sessionOutput = AVCaptureStillImageOutput()
// Layer that renders the live camera feed inside cameraView.
var previewLayer = AVCaptureVideoPreviewLayer()
@IBOutlet var cameraView: UIView!
// Builds the capture pipeline each time the view is about to appear.
// NOTE(review): re-running this on every appearance re-adds inputs/outputs to
// the same session — consider moving one-time setup to viewDidLoad.
override func viewWillAppear(animated: Bool) {
let devices = AVCaptureDevice.devicesWithMediaType(AVMediaTypeVideo)
// Scan all video-capable devices for the front-facing camera.
for device in devices {
if device.position == AVCaptureDevicePosition.Front{
do{
let input = try AVCaptureDeviceInput(device: device as! AVCaptureDevice)
if captureSession.canAddInput(input){
captureSession.addInput(input)
// Capture stills as JPEG.
sessionOutput.outputSettings = [AVVideoCodecKey : AVVideoCodecJPEG]
if captureSession.canAddOutput(sessionOutput){
captureSession.addOutput(sessionOutput)
// NOTE(review): startRunning() blocks until the session starts;
// Apple recommends calling it off the main thread.
captureSession.startRunning()
previewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill
previewLayer.connection.videoOrientation = AVCaptureVideoOrientation.Portrait
cameraView.layer.addSublayer(previewLayer)
// Center the preview layer within cameraView and size it to match.
previewLayer.position = CGPoint(x: self.cameraView.frame.width / 2, y: self.cameraView.frame.height / 2)
previewLayer.bounds = cameraView.frame
}
}
}
catch{
// NOTE(review): swallows the underlying error — consider logging `error`.
print("Error")
}
}
}
}
// Captures one still frame from the video connection and writes it to the
// user's photo library.
@IBAction func TakePhoto(sender: AnyObject) {
if let videoConnection = sessionOutput.connectionWithMediaType(AVMediaTypeVideo){
sessionOutput.captureStillImageAsynchronouslyFromConnection(videoConnection, completionHandler: {
buffer, error in
let imageData = AVCaptureStillImageOutput.jpegStillImageNSDataRepresentation(buffer)
// NOTE(review): force-unwrapping UIImage(data:) crashes if the capture
// failed — `error` and `buffer` should be checked first.
UIImageWriteToSavedPhotosAlbum(UIImage(data: imageData)!, nil, nil, nil)
})
}
}
}
Vous pouvez enregistrer votre vidéo dans un fichier en créant et en ajoutant une AVCaptureMovieFileOutput
à votre session de capture et en rendant votre ViewController
conforme à la AVCaptureFileOutputRecordingDelegate
.
Cet exemple enregistre 5 secondes de vidéo dans un fichier appelé "output.mov" dans le répertoire Documents de l'application.
// Records 5 seconds of front-camera video to Documents/output.mov using
// AVCaptureMovieFileOutput, then saves it to the photo library from the
// recording delegate callback. Swift 2-era APIs.
class ViewController: UIViewController, AVCaptureFileOutputRecordingDelegate {
var captureSession = AVCaptureSession()
// Still-image output carried over from the photo example; not required for
// movie recording, but kept to preserve the original preview setup path.
var sessionOutput = AVCaptureStillImageOutput()
// File-based movie recorder — the piece that actually writes video.
var movieOutput = AVCaptureMovieFileOutput()
var previewLayer = AVCaptureVideoPreviewLayer()
@IBOutlet var cameraView: UIView!
override func viewWillAppear(animated: Bool) {
// Use the controller's whole root view as the camera surface.
self.cameraView = self.view
let devices = AVCaptureDevice.devicesWithMediaType(AVMediaTypeVideo)
for device in devices {
if device.position == AVCaptureDevicePosition.Front{
do{
let input = try AVCaptureDeviceInput(device: device as! AVCaptureDevice)
if captureSession.canAddInput(input){
captureSession.addInput(input)
sessionOutput.outputSettings = [AVVideoCodecKey : AVVideoCodecJPEG]
if captureSession.canAddOutput(sessionOutput){
captureSession.addOutput(sessionOutput)
previewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill
previewLayer.connection.videoOrientation = AVCaptureVideoOrientation.Portrait
cameraView.layer.addSublayer(previewLayer)
previewLayer.position = CGPoint(x: self.cameraView.frame.width / 2, y: self.cameraView.frame.height / 2)
previewLayer.bounds = cameraView.frame
}
// NOTE(review): added without a canAddOutput(movieOutput) check.
captureSession.addOutput(movieOutput)
captureSession.startRunning()
// Target file: Documents/output.mov. Remove any previous recording
// first — startRecording fails if the file already exists.
let paths = NSFileManager.defaultManager().URLsForDirectory(.DocumentDirectory, inDomains: .UserDomainMask)
let fileUrl = paths[0].URLByAppendingPathComponent("output.mov")
try? NSFileManager.defaultManager().removeItemAtURL(fileUrl)
movieOutput.startRecordingToOutputFileURL(fileUrl, recordingDelegate: self)
// Stop the recording automatically after 5 seconds.
let delayTime = dispatch_time(DISPATCH_TIME_NOW, Int64(5 * Double(NSEC_PER_SEC)))
dispatch_after(delayTime, dispatch_get_main_queue()) {
print("stopping")
self.movieOutput.stopRecording()
}
}
}
catch{
// NOTE(review): swallows the underlying error — consider logging `error`.
print("Error")
}
}
}
}
// AVCaptureFileOutputRecordingDelegate: called when the movie-file recording
// finishes (successfully or with an error).
func captureOutput(captureOutput: AVCaptureFileOutput!, didFinishRecordingToOutputFileAtURL outputFileURL: NSURL!, fromConnections connections: [AnyObject]!, error: NSError!) {
print("FINISHED \(error)")
// save video to camera roll
if error == nil {
UISaveVideoAtPathToSavedPhotosAlbum(outputFileURL.path!, nil, nil, nil)
}
}
}
Merci pour ça. Ça m'a beaucoup aidé. Voici une version de la réponse de Rhythmic Fistman portée dans Swift 3 avec les instructions d'importation et les méthodes de délégation requises.
import UIKit
import AVFoundation
// Swift 3 port of the movie-recording answer: records 5 seconds of
// front-camera video to Documents/output.mov and saves it to the camera roll
// via the AVCaptureFileOutputRecordingDelegate callbacks.
class ViewController: UIViewController,
AVCaptureFileOutputRecordingDelegate {
var captureSession = AVCaptureSession()
// Still-image output carried over from the photo example; not required for
// movie recording, but kept to preserve the original preview setup path.
var sessionOutput = AVCaptureStillImageOutput()
// File-based movie recorder — the piece that actually writes video.
var movieOutput = AVCaptureMovieFileOutput()
var previewLayer = AVCaptureVideoPreviewLayer()
@IBOutlet var cameraView: UIView!
override func viewWillAppear(_ animated: Bool) {
// Use the controller's whole root view as the camera surface.
self.cameraView = self.view
let devices = AVCaptureDevice.devices(withMediaType: AVMediaTypeVideo)
for device in devices! {
if (device as AnyObject).position == AVCaptureDevicePosition.front{
do{
let input = try AVCaptureDeviceInput(device: device as! AVCaptureDevice)
if captureSession.canAddInput(input){
captureSession.addInput(input)
sessionOutput.outputSettings = [AVVideoCodecKey : AVVideoCodecJPEG]
if captureSession.canAddOutput(sessionOutput){
captureSession.addOutput(sessionOutput)
previewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill
previewLayer.connection.videoOrientation = AVCaptureVideoOrientation.portrait
cameraView.layer.addSublayer(previewLayer)
previewLayer.position = CGPoint(x: self.cameraView.frame.width / 2, y: self.cameraView.frame.height / 2)
previewLayer.bounds = cameraView.frame
}
// NOTE(review): added without a canAddOutput(movieOutput) check.
captureSession.addOutput(movieOutput)
captureSession.startRunning()
// Target file: Documents/output.mov. Remove any previous recording
// first — startRecording fails if the file already exists.
let paths = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)
let fileUrl = paths[0].appendingPathComponent("output.mov")
try? FileManager.default.removeItem(at: fileUrl)
movieOutput.startRecording(toOutputFileURL: fileUrl, recordingDelegate: self)
// Stop the recording automatically after 5 seconds.
let delayTime = DispatchTime.now() + 5
DispatchQueue.main.asyncAfter(deadline: delayTime) {
print("stopping")
self.movieOutput.stopRecording()
}
}
}
catch{
// NOTE(review): swallows the underlying error — consider logging `error`.
print("Error")
}
}
}
}
//MARK: AVCaptureFileOutputRecordingDelegate Methods
// Called when recording to the output file begins; nothing to do here.
func capture(_ captureOutput: AVCaptureFileOutput!, didStartRecordingToOutputFileAt fileURL: URL!, fromConnections connections: [Any]!) {
}
// Called when the movie-file recording finishes (successfully or with error).
func capture(_ captureOutput: AVCaptureFileOutput!, didFinishRecordingToOutputFileAt outputFileURL: URL!, fromConnections connections: [Any]!, error: Error!) {
print("FINISHED \(error)")
// save video to camera roll
if error == nil {
UISaveVideoAtPathToSavedPhotosAlbum(outputFileURL.path, nil, nil, nil)
}
}
}
vous pouvez utiliser ce code pour enregistrer vos vidéos dans la photothèque. Vous devez indiquer les paramètres suivants, dont le plus important est outputFileURL.path, chemin du système de fichiers du fichier vidéo que vous souhaitez enregistrer dans l'album Pellicule. Pour le reste des paramètres, vous pouvez soit passer leurs valeurs respectives, soit attribuer la valeur nil qui dépend de vos besoins.
// AVCaptureFileOutputRecordingDelegate (Swift 4+ signature): called when a
// movie-file recording completes; on success, saves the movie to the
// Camera Roll.
func fileOutput(_ output: AVCaptureFileOutput, didFinishRecordingTo outputFileURL: URL, from connections: [AVCaptureConnection], error: Error?) {
if (error != nil) {
print("Error recording movie: \(error!.localizedDescription)")
} else {
// NOTE(review): a selector is supplied but the completion target is nil;
// UISaveVideoAtPathToSavedPhotosAlbum only invokes the selector on a
// non-nil target — pass the CameraController instance for the
// video(_:didFinishSavingWithError:contextInfo:) callback to fire.
UISaveVideoAtPathToSavedPhotosAlbum(outputFileURL.path, nil, #selector(CameraController.video(_:didFinishSavingWithError:contextInfo:)), nil)
}
// Clear the stored URL so the next recording starts fresh.
// NOTE(review): `outputURL` is a property defined elsewhere in this class.
outputURL = nil
}
Pour le problème d'enregistrement sonore,
Ajoutez ce code lors de la création de la captureSession
// Request microphone access, then attach the audio capture device to the
// session so recorded movies include sound.
// (The original snippet was mangled by machine translation —
// "complétion:"/"dans" are restored to valid Swift "completion:"/"in".)
askMicroPhonePermission(completion: { (isMicrophonePermissionGiven) in
    if isMicrophonePermissionGiven {
        do {
            // `captureAudio` is the AVCaptureDevice for the microphone,
            // declared elsewhere in this class.
            try self.captureSession.addInput(AVCaptureDeviceInput(device: self.captureAudio))
        } catch {
            // Corrected message: this is audio-input setup, not a database.
            print("Error adding audio input: \(error)")
        }
    }
})
////////////////////////////////////////////////////////////////////////
la fonction askMicroPhonePermission est la suivante
// Resolves the current microphone permission state, prompting the user if it
// has not been decided yet, and reports the outcome through `completion`
// (true = recording allowed).
func askMicroPhonePermission(completion: @escaping (_ success: Bool)-> Void) {
    let currentPermission = AVAudioSession.sharedInstance().recordPermission()
    switch currentPermission {
    case AVAudioSessionRecordPermission.undetermined:
        // First request: trigger the system prompt and forward the choice.
        AVAudioSession.sharedInstance().requestRecordPermission { allowed in
            completion(allowed)
        }
    case AVAudioSessionRecordPermission.granted:
        completion(true)
    case AVAudioSessionRecordPermission.denied:
        // Caller may show an alert directing the user to Settings.
        completion(false)
    default:
        // Future-proofing for unknown permission states.
        completion(false)
    }
}
Et vous devez ajouter la valeur de la clé NSMicrophoneUsageDescription dans le fichier info.plist.
après if (device as AnyObject).position == AVCaptureDevicePosition.front{
ajouter
// Audio Input
// Look up the default microphone (Swift 3-era API; later SDKs use
// AVCaptureDevice.default(for: .audio)).
let audioInputDevice = AVCaptureDevice.defaultDevice(withMediaType: AVMediaTypeAudio)
do
{
// Wrap the microphone in a capture input so it can join the session.
let audioInput = try AVCaptureDeviceInput(device: audioInputDevice)
// Add Audio Input
// `captureSession` is the session configured earlier in viewWillAppear.
if captureSession.canAddInput(audioInput)
{
captureSession.addInput(audioInput)
}
else
{
NSLog("Can't Add Audio Input")
}
}
catch let error
{
NSLog("Error Getting Input Device: \(error)")
}
Merci