Drawing on the previewLayer: AVCaptureVideoPreviewLayer


I have a small app, SimpleCamera, that shows a live (video) preview, with an on-screen button to take a photo. The photo is then displayed and you can save or discard it. Everything works, and I use the code below to draw a gray border around the on-screen preview. That works fine too. But can I also draw things like the following on the preview screen? I can't figure out how to add the second block of code shown below the first one.

    // Provide a camera preview
    cameraPreviewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
    view.layer.addSublayer(cameraPreviewLayer!)
    cameraPreviewLayer?.videoGravity = AVLayerVideoGravity.resizeAspectFill
    cameraPreviewLayer?.frame = view.layer.frame
    //Add preview layer for drawing
    let previewLayer: AVCaptureVideoPreviewLayer = AVCaptureVideoPreviewLayer(session: self.captureSession)
    previewLayer.frame = self.view.layer.frame
    previewLayer.videoGravity = AVLayerVideoGravity.resizeAspectFill
    self.view.layer.addSublayer(previewLayer)
    //Add Rectangle
    let cgRect = CGRect(x: 0, y: 0, width: self.view.bounds.width, height: self.view.bounds.height)
    let myView = UIImageView()
    myView.frame = cgRect
    myView.backgroundColor = UIColor.clear
    myView.isOpaque = false
    myView.layer.cornerRadius = 10
    myView.layer.borderColor =  UIColor.lightGray.cgColor
    myView.layer.borderWidth = 3
    myView.layer.masksToBounds = true
    previewLayer.addSublayer(myView.layer)
    // Bring the camera button to front
    view.bringSubview(toFront: cameraButton)
    captureSession.startRunning()
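
As an aside, the border itself should not need the second AVCaptureVideoPreviewLayer created above; an ordinary overlay view on top of the existing cameraPreviewLayer gives the same result. A minimal sketch under that assumption (borderView is a made-up name, not from the project):

    // Sketch only: the same gray rounded border drawn by a plain overlay view
    // added to the view hierarchy, so no second preview layer is needed.
    let borderView = UIView(frame: view.bounds)
    borderView.backgroundColor = UIColor.clear
    borderView.isOpaque = false
    borderView.layer.cornerRadius = 10
    borderView.layer.borderColor = UIColor.lightGray.cgColor
    borderView.layer.borderWidth = 3
    borderView.layer.masksToBounds = true
    view.addSubview(borderView)   // subviews render above view.layer's sublayers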

No matter where I put the following code, it never shows up.

    //Add circles
    let midX = screenWidth / 2
    let midY = screenHeight / 2
    let w = screenWidth
    var circlePath = UIBezierPath(arcCenter: CGPoint(x: midX,y: midY), radius: CGFloat(w * 0.010), startAngle: CGFloat(0), endAngle:CGFloat(Double.pi * 2), clockwise: true)
    let circleRads = [ 0.07, 0.13, 0.17, 0.22, 0.29, 0.36, 0.40, 0.48, 0.60, 0.75 ]
    for pct in circleRads {
        let rad = w * CGFloat(pct)
        circlePath = UIBezierPath(arcCenter: CGPoint(x: midX, y: midY), radius: CGFloat(rad), startAngle: CGFloat(0), endAngle:CGFloat(Double.pi * 2), clockwise: true)
        circlePath.lineWidth = 2.5
        circlePath.stroke()
    }

     // draw text time stamp on image
     let now = Date()
     let formatter = DateFormatter()
     formatter.timeZone = TimeZone.current
     formatter.dateFormat = "yyyy-MM-dd HH:mm"
     let dateString = formatter.string(from: now)
     let paragraphStyle = NSMutableParagraphStyle()
     paragraphStyle.alignment = .center
     let attrs = [NSAttributedStringKey.font: UIFont(name: "HelveticaNeue-Thin", size: 26)!, NSAttributedStringKey.paragraphStyle: paragraphStyle]
     let string = dateString
     string.draw(with: CGRect(x: 12, y: 38, width: 448, height: 448), options: .usesLineFragmentOrigin, attributes: attrs, context: nil)
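
Presumably the reason nothing shows up is that UIBezierPath.stroke() and the string drawing call only render into a current graphics context (for example inside a UIView's draw(_:) or a UIGraphicsImageRenderer block); called from setup code there is no such context, so the drawing is silently discarded. A minimal sketch of one way to get the circles onto the preview instead, using CAShapeLayer sublayers (the method name addCircleOverlay is a placeholder, not from the project):

    // Sketch only: draw the concentric circles as CAShapeLayers laid over the preview.
    private func addCircleOverlay() {
        let midX = screenWidth / 2
        let midY = screenHeight / 2
        let w = screenWidth
        let circleRads: [CGFloat] = [0.07, 0.13, 0.17, 0.22, 0.29, 0.36, 0.40, 0.48, 0.60, 0.75]

        for pct in circleRads {
            // Same path as above, but handed to a CAShapeLayer instead of stroke()
            let circlePath = UIBezierPath(arcCenter: CGPoint(x: midX, y: midY),
                                          radius: w * pct,
                                          startAngle: 0,
                                          endAngle: CGFloat(Double.pi * 2),
                                          clockwise: true)
            let ring = CAShapeLayer()
            ring.path = circlePath.cgPath
            ring.strokeColor = UIColor.red.cgColor
            ring.fillColor = UIColor.clear.cgColor
            ring.lineWidth = 2.5
            view.layer.addSublayer(ring)   // sits above cameraPreviewLayer
        }
    }

Calling something like this once from configure(), after the preview layer has been added, should be enough for the circles; the date stamp is a separate problem, because it has to be drawn into the captured image rather than onto the live preview.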
Answer

A partial answer. I can draw a border around the whole screen. This is the SimpleCamera app from AppCoda's Intermediate iOS 11 Programming book (Swift 4). Below is the code of the CameraController.swift file; when opened in Xcode, the border-drawing part is lines 176 to 192. But I still can't figure out how to make the commented-out section draw the set of circles, put a date stamp on the image, and save it (a sketch of one possible approach for the date stamp follows the listing).

//
//  CameraController.swift
//  Camera
//
//  Created by Simon Ng on 16/10/2016.
//  Copyright © 2016 AppCoda. All rights reserved.
//

import UIKit
import AVFoundation
import Foundation

class CameraController: UIViewController {

@IBOutlet var cameraButton:UIButton!

//===================================
@IBOutlet weak var navigationBar: UINavigationBar!
@IBOutlet weak var imgOverlay: UIImageView!
@IBOutlet weak var btnCapture: UIButton!
@IBOutlet weak var btnInfo: UIButton!
@IBOutlet weak var btnSocial: UIButton!
@IBOutlet weak var shapeLayer: UIView!
@IBOutlet weak var btnRed: UIButton!
@IBOutlet weak var btnGreen: UIButton!
@IBOutlet weak var btnBlue: UIButton!
@IBOutlet weak var btnYellow: UIButton!
@IBOutlet weak var btnWhite: UIButton!

//===================================

var backFacingCamera: AVCaptureDevice?
var frontFacingCamera: AVCaptureDevice?
var currentDevice: AVCaptureDevice!

var stillImageOutput: AVCapturePhotoOutput!
var stillImage: UIImage?

var cameraPreviewLayer: AVCaptureVideoPreviewLayer?


let captureSession = AVCaptureSession()

var toggleCameraGestureRecognizer = UISwipeGestureRecognizer()
var zoomInGestureRecognizer = UISwipeGestureRecognizer()
var zoomOutGestureRecognizer = UISwipeGestureRecognizer()
//===============================

//let stillImageOutput = AVCaptureStillImageOutput()
var previewLayer : AVCaptureVideoPreviewLayer?


let screenWidth = UIScreen.main.bounds.size.width
let screenHeight = UIScreen.main.bounds.size.height
var aspectRatio: CGFloat = 1.0

var viewFinderHeight: CGFloat = 0.0
var viewFinderWidth: CGFloat = 0.0
var viewFinderMarginLeft: CGFloat = 0.0
var viewFinderMarginTop: CGFloat = 0.0

var lineColor : UIColor?
var color: Int = 0
//==============================


override func viewDidLoad() {
    super.viewDidLoad()


     configure()
}



override func didReceiveMemoryWarning() {
    super.didReceiveMemoryWarning()
    // Dispose of any resources that can be recreated.
}

// MARK: - Action methods

@IBAction func capture(sender: UIButton) {

    // Set photo settings
    let photoSettings = AVCapturePhotoSettings(format: [AVVideoCodecKey: AVVideoCodecType.jpeg])
    photoSettings.isAutoStillImageStabilizationEnabled = true
    photoSettings.isHighResolutionPhotoEnabled = true
    photoSettings.flashMode = .off

    stillImageOutput.isHighResolutionCaptureEnabled = true
    stillImageOutput.capturePhoto(with: photoSettings, delegate: self)
}

// MARK: - Segues

@IBAction func unwindToCameraView(segue: UIStoryboardSegue) {

}

override func prepare(for segue: UIStoryboardSegue, sender: Any?) {
    // Get the new view controller using segue.destinationViewController.
    // Pass the selected object to the new view controller.
    if segue.identifier == "showPhoto" {
        let photoViewController = segue.destination as! PhotoViewController
        photoViewController.image = stillImage
    }
}

// MARK: - Helper methods

private func configure() {
    // Preset the session for taking photo in full resolution
    captureSession.sessionPreset = AVCaptureSession.Preset.photo

    // Get the front and back-facing camera for taking photos
    let deviceDiscoverySession = AVCaptureDevice.DiscoverySession(deviceTypes: [.builtInWideAngleCamera], mediaType: AVMediaType.video, position: .unspecified)

    for device in deviceDiscoverySession.devices {
        if device.position == .back {
            backFacingCamera = device
        } else if device.position == .front {
            frontFacingCamera = device
        }
    }

    currentDevice = backFacingCamera

    guard let captureDeviceInput = try? AVCaptureDeviceInput(device: currentDevice) else {
        return
    }

    // Configure the session with the output for capturing still images
    stillImageOutput = AVCapturePhotoOutput()

    // Configure the session with the input and the output devices
    captureSession.addInput(captureDeviceInput)
    captureSession.addOutput(stillImageOutput)

    // Provide a camera preview
    cameraPreviewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
    view.layer.addSublayer(cameraPreviewLayer!)
    cameraPreviewLayer?.videoGravity = AVLayerVideoGravity.resizeAspectFill
    cameraPreviewLayer?.frame = view.layer.frame
    //////////////

    //Add circles
    // red circles - radius in %
/*
    let midX = screenWidth / 2
    let midY = screenHeight / 2
    let w = screenWidth
    //let h = screenHeight
    var circlePath = UIBezierPath(arcCenter: CGPoint(x: midX,y: midY), radius: CGFloat(w * 0.010), startAngle: CGFloat(0), endAngle:CGFloat(Double.pi * 2), clockwise: true)
    let circleRads = [ 0.07, 0.13, 0.17, 0.22, 0.29, 0.36, 0.40, 0.48, 0.60, 0.75 ]
    for pct in circleRads {
        let rad = w * CGFloat(pct)
        circlePath = UIBezierPath(arcCenter: CGPoint(x: midX, y: midY), radius: CGFloat(rad), startAngle: CGFloat(0), endAngle:CGFloat(Double.pi * 2), clockwise: true)
        circlePath.lineWidth = 2.5
        circlePath.stroke()
    }
     // draw text time stamp on image
     let now = Date()
     let formatter = DateFormatter()
     formatter.timeZone = TimeZone.current
     formatter.dateFormat = "yyyy-MM-dd HH:mm"
     let dateString = formatter.string(from: now)
     // print(dateString)
     let paragraphStyle = NSMutableParagraphStyle()
     paragraphStyle.alignment = .center
     let attrs = [NSAttributedStringKey.font: UIFont(name: "HelveticaNeue-Thin", size: 26)!, NSAttributedStringKey.paragraphStyle: paragraphStyle]
     let string = dateString
     string.draw(with: CGRect(x: 22, y: 18, width: 448, height: 448), options: .usesLineFragmentOrigin, attributes: attrs, context: nil)
     print("Did the date")
*/
    //Add Rectangular border
     let previewLayer: AVCaptureVideoPreviewLayer = AVCaptureVideoPreviewLayer(session: self.captureSession)
     previewLayer.frame = self.view.layer.frame
     previewLayer.videoGravity = AVLayerVideoGravity.resizeAspectFill
     self.view.layer.addSublayer(previewLayer)

    let cgRect = CGRect(x: 0, y: 0, width: self.view.bounds.width, height: self.view.bounds.height)
    let myView = UIImageView()
    myView.frame = cgRect
    myView.backgroundColor = UIColor.clear
    myView.isOpaque = false
    myView.layer.cornerRadius = 10
    myView.layer.borderColor =  UIColor.lightGray.cgColor
    myView.layer.borderWidth = 3
    myView.layer.masksToBounds = true

    previewLayer.addSublayer(myView.layer)
    ///////////////

    // Bring the camera button to front
    view.bringSubview(toFront: cameraButton)
    captureSession.startRunning()


    print("so far 2")
    // Toggle Camera recognizer
    toggleCameraGestureRecognizer.direction = .up
    toggleCameraGestureRecognizer.addTarget(self, action: #selector(toggleCamera))
    view.addGestureRecognizer(toggleCameraGestureRecognizer)

    // Zoom In recognizer
    zoomInGestureRecognizer.direction = .right
    zoomInGestureRecognizer.addTarget(self, action: #selector(zoomIn))
    view.addGestureRecognizer(zoomInGestureRecognizer)

    // Zoom Out recognizer
    zoomOutGestureRecognizer.direction = .left
    zoomOutGestureRecognizer.addTarget(self, action: #selector(zoomOut))
    view.addGestureRecognizer(zoomOutGestureRecognizer)
}

@objc func toggleCamera() {
    captureSession.beginConfiguration()

    // Change the device based on the current camera
    guard let newDevice = (currentDevice?.position == AVCaptureDevice.Position.back) ? frontFacingCamera : backFacingCamera else {
        return
    }

    // Remove all inputs from the session
    for input in captureSession.inputs {
        captureSession.removeInput(input as! AVCaptureDeviceInput)
    }

    // Change to the new input
    let cameraInput:AVCaptureDeviceInput
    do {
    cameraInput = try AVCaptureDeviceInput(device: newDevice)
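
For the date stamp and for saving, one possible direction (a sketch under assumptions, not code from the book) is to leave the live preview alone and draw the text onto the captured UIImage with UIGraphicsImageRenderer before it is handed to PhotoViewController; the helper name stampDate(on:) is made up for illustration:

    // Sketch only: stamp the current date/time onto a captured UIImage.
    // stampDate(on:) is a hypothetical helper, not part of the AppCoda project.
    private func stampDate(on image: UIImage) -> UIImage {
        let formatter = DateFormatter()
        formatter.timeZone = TimeZone.current
        formatter.dateFormat = "yyyy-MM-dd HH:mm"
        let dateString = formatter.string(from: Date())

        let paragraphStyle = NSMutableParagraphStyle()
        paragraphStyle.alignment = .center
        let attrs: [NSAttributedStringKey: Any] = [
            .font: UIFont(name: "HelveticaNeue-Thin", size: 26) ?? UIFont.systemFont(ofSize: 26),
            .paragraphStyle: paragraphStyle,
            .foregroundColor: UIColor.white
        ]

        // Redrawing the photo inside an image-renderer block provides the graphics
        // context that draw(with:options:attributes:context:) needs.
        let renderer = UIGraphicsImageRenderer(size: image.size)
        return renderer.image { _ in
            image.draw(in: CGRect(origin: .zero, size: image.size))
            dateString.draw(with: CGRect(x: 12, y: 38, width: 448, height: 448),
                            options: .usesLineFragmentOrigin,
                            attributes: attrs,
                            context: nil)
        }
    }

The photo-capture delegate could then set stillImage to stampDate(on: image) before performing the showPhoto segue, and the stamped image could be saved with UIImageWriteToSavedPhotosAlbum.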
