But I found that the capture APIs have changed a bit over the last few releases, so here is a very simple example of using AVCaptureSession. (Note that, since iOS 10, you must also add an NSCameraUsageDescription entry to Info.plist, or the app will crash as soon as it touches the camera.)
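Before starting the session, you should also make sure the user has actually granted camera access. A minimal sketch of that check (requestCameraAccess is just a hypothetical helper name, not part of the example below):

import AVFoundation

// Ask for camera access before starting the capture session.
func requestCameraAccess(_ completion: @escaping (Bool) -> Void) {
    switch AVCaptureDevice.authorizationStatus(for: .video) {
    case .authorized:
        completion(true)
    case .notDetermined:
        // First launch: this triggers the system permission alert.
        AVCaptureDevice.requestAccess(for: .video) { granted in
            DispatchQueue.main.async { completion(granted) }
        }
    default:
        // Denied or restricted.
        completion(false)
    }
}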
import UIKit
import AVFoundation

class CameraViewController: UIViewController {

    @IBOutlet weak var layerView: UIView!

    var previewLayer: AVCaptureVideoPreviewLayer!
    let stillImageOutput = AVCapturePhotoOutput()

    override func viewDidLoad() {
        super.viewDidLoad()
        initCameraOverlay()
    }
    func initCameraOverlay() {
        let session = AVCaptureSession()
        session.sessionPreset = .photo

        // Show the camera feed inside the container view.
        previewLayer = AVCaptureVideoPreviewLayer(session: session)
        previewLayer.videoGravity = .resizeAspectFill
        previewLayer.frame = layerView.bounds
        layerView.layer.addSublayer(previewLayer)
        layerView.layer.masksToBounds = true

        // Look up the available cameras.
        let discoverySession = AVCaptureDevice.DiscoverySession(
            deviceTypes: [.builtInWideAngleCamera, .builtInDualCamera],
            mediaType: .video,
            position: .unspecified)

        for device in discoverySession.devices {
            if device.position == .back {
                do {
                    let input = try AVCaptureDeviceInput(device: device)
                    if session.canAddInput(input) {
                        session.addInput(input)
                    }
                } catch {
                    print("Could not create a camera input: \(error)")
                }
            }
            if device.position == .front {
                // Do something
            }
        }

        if session.canAddOutput(stillImageOutput) {
            session.addOutput(stillImageOutput)
        }

        // startRunning() blocks until the session is set up,
        // so consider moving it off the main thread in real code.
        session.startRunning()
    }
}
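The stillImageOutput configured above is never actually used to take a picture. As a rough sketch of how a capture could look (the capturePhoto(_:) action is hypothetical, and the delegate callback shown is the iOS 11+ form of AVCapturePhotoCaptureDelegate):

extension CameraViewController: AVCapturePhotoCaptureDelegate {

    // Hypothetical shutter action, e.g. wired to a button in the storyboard.
    @IBAction func capturePhoto(_ sender: Any) {
        stillImageOutput.capturePhoto(with: AVCapturePhotoSettings(), delegate: self)
    }

    // Called when the capture finishes (iOS 11+).
    func photoOutput(_ output: AVCapturePhotoOutput,
                     didFinishProcessingPhoto photo: AVCapturePhoto,
                     error: Error?) {
        guard error == nil,
              let data = photo.fileDataRepresentation(),
              let image = UIImage(data: data) else { return }
        // Do something with the captured image.
        print("Captured image: \(image.size)")
    }
}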