如何在 QuickBlox 通话期间截取正在进行的视频

Posted

技术标签:

【中文标题】如何在 QuickBlox 通话期间截取正在进行的视频【英文标题】:How to take a screenshot of ongoing video during a call in QuickBlox 【发布时间】:2018-12-07 12:27:02 【问题描述】:

我想在 ios Swift 通话期间截取正在进行的VideoCaptureView 的屏幕截图。我使用 QuickBlox

我使用了以下返回黑色图像的代码

// MARK: - UIView snapshot helpers
// NOTE(review): the original passed `afterScreenUpdates: false`, which is the
// usual cause of the black image described in the question — `true` makes the
// render server commit pending updates before drawing. Video layers backed by
// AVSampleBufferDisplayLayer / the camera preview may still render black; in
// that case capture from the AVCaptureSession instead (see the second answer).
public extension UIView {

    /// Renders the view hierarchy into a `UIImage`.
    /// Returns `nil` if the graphics context could not produce an image.
    func snapshotImage() -> UIImage? {
        UIGraphicsBeginImageContextWithOptions(bounds.size, isOpaque, 0)
        // `true`: wait for pending screen updates so the capture is not blank.
        drawHierarchy(in: bounds, afterScreenUpdates: true)
        let snapshotImage = UIGraphicsGetImageFromCurrentImageContext()
        UIGraphicsEndImageContext()
        return snapshotImage
    }

    /// Wraps `snapshotImage()` in a `UIImageView`, or returns `nil` on failure.
    func snapshotView() -> UIView? {
        guard let snapshotImage = snapshotImage() else { return nil }
        return UIImageView(image: snapshotImage)
    }
}
let snapshot = view.snapshotView()

【问题讨论】:

【参考方案1】:

试试这个

/// Renders the contents of `view` into a `UIImage`.
/// - Parameter view: the view to capture; when `nil`, a bundled fallback image is returned.
/// - Returns: the rendered image (never nil; falls back to "Logo.png").
fileprivate func captureUIImageFromUIView(_ view: UIView?) -> UIImage {

    // Unwrap once instead of force-unwrapping `view!` on every use below.
    guard let view = view else {
        // if the view is nil (it's happened to me) return an alternative image
        // NOTE(review): assumes "Logo.png" exists in the main bundle — the
        // force-unwrap crashes if the asset is missing. TODO confirm asset name.
        return UIImage(named: "Logo.png")!
    }

    // if the view is all good then convert the image inside the view to a uiimage
    if #available(iOS 10.0, *) {
        let renderer = UIGraphicsImageRenderer(size: view.bounds.size)
        return renderer.image { _ in
            view.drawHierarchy(in: view.bounds, afterScreenUpdates: true)
        }
    } else {
        UIGraphicsBeginImageContextWithOptions(view.bounds.size, view.isOpaque, 0.0)
        // Match the iOS 10 path (`afterScreenUpdates: true`) — the original
        // used `false` here, which can yield a black capture.
        view.drawHierarchy(in: view.bounds, afterScreenUpdates: true)
        let capturedImage = UIGraphicsGetImageFromCurrentImageContext()
        UIGraphicsEndImageContext()
        return capturedImage!
    }
}

打电话

let pdfImage = self.captureUIImageFromUIView(self.containerView)

//
//  ViewController.swift
//  GooglePlace Autocomplete
//
//  Created by infos on 7/10/18.
//  Copyright © 2018 infos. All rights reserved.
//

import UIKit
import GoogleMaps
import GooglePlaces
import SwiftyJSON
import Alamofire

/// Which of the two search fields (start / destination) is currently being edited.
enum Location {
    case startLocation
    case destinationLocation
}

class ViewController: UIViewController, GMSMapViewDelegate, CLLocationManagerDelegate, UITextFieldDelegate {

// Map view and the two search text fields (connected in the storyboard).
@IBOutlet weak var googleMaps: GMSMapView!
@IBOutlet weak var startLocation: UITextField!
@IBOutlet weak var destinationLocation: UITextField!


// Location tracking and route state.
var locationManager = CLLocationManager()
var locationSelected = Location.startLocation  // which field the autocomplete result fills
var polyline = GMSPolyline()                   // current route overlay (cleared before redraw)
var locationStart = CLLocation()
var locationEnd = CLLocation()

/// Configures the Google Map, requests an initial driving route, and drops two markers.
override func viewDidLoad() {

    super.viewDidLoad()
    // WARNING(review): hard-coded API keys should not be committed to source
    // control — move them to a configuration file excluded from the repo.
    GMSPlacesClient.provideAPIKey("AIzaSyC55Dq1qPH7EM_uiAVf-8QuxJtf2W1viQs")
    GMSServices.provideAPIKey("AIzaSyC55Dq1qPH7EM_uiAVf-8QuxJtf2W1viQs")

    // Create a GMSCameraPosition that tells the map what to display.
    let camera = GMSCameraPosition.camera(withLatitude: 13.082680,
                                          longitude: 80.270718,
                                          zoom: 10.0,
                                          bearing: 30,
                                          viewingAngle: 40)
    // Setting up the google map view.
    googleMaps.camera = camera
    googleMaps.delegate = self
    googleMaps.isMyLocationEnabled = true
    googleMaps.settings.myLocationButton = true
    googleMaps.settings.compassButton = true
    googleMaps.settings.zoomGestures = true
    googleMaps.animate(to: camera)
    self.view.addSubview(googleMaps)

    // Setting the start and end location for the initial route request.
    let origin = "\(13.082680),\(80.270718)"
    let destination = "\(15.912900),\(79.739987)"

    let url = "https://maps.googleapis.com/maps/api/directions/json?origin=\(origin)&destination=\(destination)&mode=driving"

    // Requesting with Alamofire and parsing with SwiftyJSON.
    Alamofire.request(url).responseJSON { response in
        print(response.request as Any)  // original URL request
        print(response.response as Any) // HTTP URL response
        print(response.data as Any)     // server data
        print(response.result)          // result of response serialization

        do {
            let json = try JSON(data: response.data!)
            let routes = json["routes"].arrayValue

            // Draw every returned route as a blue polyline.
            for route in routes {
                let routeOverviewPolyline = route["overview_polyline"].dictionary
                let points = routeOverviewPolyline?["points"]?.stringValue
                let path = GMSPath.init(fromEncodedPath: points!)
                let polyline = GMSPolyline.init(path: path)
                polyline.strokeColor = UIColor.blue
                polyline.strokeWidth = 2
                polyline.map = self.googleMaps
            }
        } catch {
            // NOTE(review): decoding/network errors are silently swallowed here.
        }
    }

    // Creates a marker in the center of the map.
    let marker = GMSMarker()
    marker.position = CLLocationCoordinate2D(latitude: 28.524555, longitude: 77.275111)
    marker.title = "Mobiloitte"
    marker.snippet = "India"
    marker.map = googleMaps

    // 28.643091, 77.218280
    let marker1 = GMSMarker()
    marker1.position = CLLocationCoordinate2D(latitude: 28.643091, longitude: 77.218280)
    marker1.title = "NewDelhi"
    marker1.snippet = "India"
    marker1.map = googleMaps
}

// MARK: function for create a marker pin on map
/// Creates a draggable marker pin on the map at the given coordinate.
/// - Parameters:
///   - titleMarker: title shown in the marker's info window.
///   - iconMarker: custom icon image for the pin.
///   - latitude/longitude: position of the pin.
func createMarker(titleMarker: String, iconMarker: UIImage, latitude: CLLocationDegrees, longitude: CLLocationDegrees) {
    let marker = GMSMarker()
    marker.position = CLLocationCoordinate2DMake(latitude, longitude)
    marker.isDraggable = true
    marker.title = titleMarker
    marker.icon = iconMarker
    marker.map = googleMaps
}

//MARK: - Location Manager delegates

/// CLLocationManagerDelegate — location lookup failed; just log it.
func locationManager(_ manager: CLLocationManager, didFailWithError error: Error) {
    print("Error to get location : \(error)")
}


/// CLLocationManagerDelegate — draws a route from the latest fix to a fixed
/// destination, then stops updates (one-shot location).
func locationManager(_ manager: CLLocationManager, didUpdateLocations locations: [CLLocation]) {

    // Original force-unwrapped `locations.last` below; guard avoids a crash
    // on an empty array.
    guard let location = locations.last else { return }

    //  let camera = GMSCameraPosition.camera(withLatitude: (location?.coordinate.latitude)!, longitude: (location?.coordinate.longitude)!, zoom: 17.0)

    // Hard-coded destination (same coordinate as the "Mobiloitte" marker).
    let locationMobi = CLLocation(latitude: 28.524555, longitude: 77.275111)

    drawPath(startLocation: location, endLocation: locationMobi)

    //self.googleMaps?.animate(to: camera)
    self.locationManager.stopUpdatingLocation()
}

// MARK: - GMSMapViewDelegate

/// GMSMapViewDelegate — camera stopped moving; keep the my-location dot enabled.
func mapView(_ mapView: GMSMapView, idleAt position: GMSCameraPosition) {
    googleMaps.isMyLocationEnabled = true
}

/// GMSMapViewDelegate — camera is about to move.
/// Deselects the current marker when the move was user-initiated.
func mapView(_ mapView: GMSMapView, willMove gesture: Bool) {
    googleMaps.isMyLocationEnabled = true

    if gesture {
        mapView.selectedMarker = nil
    }
}

/// GMSMapViewDelegate — marker tapped.
/// Returns `false` so the map still shows the default info window.
func mapView(_ mapView: GMSMapView, didTap marker: GMSMarker) -> Bool {
    googleMaps.isMyLocationEnabled = true
    return false
}

/// GMSMapViewDelegate — logs the coordinate the user tapped.
func mapView(_ mapView: GMSMapView, didTapAt coordinate: CLLocationCoordinate2D) {
    print("COORDINATE \(coordinate)") // when you tapped coordinate
}

/// GMSMapViewDelegate — my-location button tapped.
/// Returns `false` so the map performs its default center-on-user behavior.
func didTapMyLocationButton(for mapView: GMSMapView) -> Bool {
    googleMaps.isMyLocationEnabled = true
    googleMaps.selectedMarker = nil
    return false
}

//MARK: - Marker Delegate

/// GMSMapViewDelegate — marker is being dragged.
/// Intentionally empty: the route is only redrawn in `didEndDragging`.
func mapView(_ mapView: GMSMapView, didDrag marker: GMSMarker) {
}


/// GMSMapViewDelegate — marker drag started. Intentionally empty.
func mapView(_ mapView: GMSMapView, didBeginDragging marker: GMSMarker) {
}


/// GMSMapViewDelegate — marker drag finished.
/// Redraws the route from the marker's new position to the saved destination.
func mapView(_ mapView: GMSMapView, didEndDragging marker: GMSMarker) {
    self.googleMaps.reloadInputViews()

    //self.polyline.map = nil;
    print("marker dragged to location: \(marker.position.latitude),\(marker.position.longitude)")
    let locationMobi = CLLocation(latitude: marker.position.latitude, longitude: marker.position.longitude)
    self.drawPath(startLocation: locationMobi, endLocation: locationEnd)
}



//MARK: - this is function for create direction path, from start location to desination location

/// Requests a driving route from the Google Directions API and draws it as a
/// red polyline, replacing the previous route overlay.
/// - Parameters:
///   - startLocation: route origin.
///   - endLocation: route destination.
func drawPath(startLocation: CLLocation, endLocation: CLLocation) {

    let origin = "\(startLocation.coordinate.latitude),\(startLocation.coordinate.longitude)"
    let destination = "\(endLocation.coordinate.latitude),\(endLocation.coordinate.longitude)"

    // Remove the previous route overlay before drawing the new one.
    self.polyline.map = nil
    //self.googleMaps.clear()

    let url = "https://maps.googleapis.com/maps/api/directions/json?origin=\(origin)&destination=\(destination)&mode=driving"

    Alamofire.request(url).responseJSON { response in

        print(response.request as Any)  // original URL request
        print(response.response as Any) // HTTP URL response
        print(response.data as Any)     // server data
        print(response.result as Any)   // result of response serialization

        do {
            let json = try JSON(data: response.data!)
            let routes = json["routes"].arrayValue

            // print route using Polyline
            for route in routes {
                let routeOverviewPolyline = route["overview_polyline"].dictionary
                let points = routeOverviewPolyline?["points"]?.stringValue
                let path = GMSPath.init(fromEncodedPath: points!)
                self.polyline = GMSPolyline.init(path: path)
                self.polyline.strokeWidth = 2
                self.polyline.strokeColor = UIColor.red
                self.polyline.map = self.googleMaps
            }
        } catch {
            // NOTE(review): JSON/network errors are silently ignored here.
        }
    }
}

// MARK: when start location tap, this will open the search location
/// Opens the Google Places autocomplete search for the START location field.
@IBAction func openStartLocation(_ sender: UIButton) {

    let autoCompleteController = GMSAutocompleteViewController()
    autoCompleteController.delegate = self

    // Remember which field the autocomplete result should populate.
    locationSelected = .startLocation

    // Change text color
    UISearchBar.appearance().setTextColor(color: UIColor.black)
    self.locationManager.stopUpdatingLocation()

    self.present(autoCompleteController, animated: true, completion: nil)
}

// MARK: when destination location tap, this will open the search location
/// Opens the Google Places autocomplete search for the DESTINATION location field.
@IBAction func openDestinationLocation(_ sender: UIButton) {

    let autoCompleteController = GMSAutocompleteViewController()
    autoCompleteController.delegate = self

    // Remember which field the autocomplete result should populate.
    locationSelected = .destinationLocation

    // Change text color
    UISearchBar.appearance().setTextColor(color: UIColor.black)
    self.locationManager.stopUpdatingLocation()

    self.present(autoCompleteController, animated: true, completion: nil)
}


// MARK: SHOW DIRECTION WITH BUTTON
/// Draws the route between the currently selected start and end locations.
@IBAction func showDirection(_ sender: UIButton) {
    // when button direction tapped, must call drawpath func
    self.drawPath(startLocation: locationStart, endLocation: locationEnd)
}
// MARK: - GMS Auto Complete Delegate, for autocomplete search location
extension ViewController: GMSAutocompleteViewControllerDelegate {

/// GMSAutocompleteViewControllerDelegate — autocomplete failed; just log it.
func viewController(_ viewController: GMSAutocompleteViewController, didFailAutocompleteWithError error: Error) {
    print("Error \(error)")
}

/// GMSAutocompleteViewControllerDelegate — a place was picked.
/// Fills the field selected in `locationSelected`, drops a marker, and moves
/// the camera to the chosen place.
func viewController(_ viewController: GMSAutocompleteViewController, didAutocompleteWith place: GMSPlace) {

    // Change map location
    let camera = GMSCameraPosition.camera(withLatitude: place.coordinate.latitude, longitude: place.coordinate.longitude, zoom: 16.0)

    // set coordinate to text
    if locationSelected == .startLocation {
        if (self.locationManager.location?.coordinate.longitude) != nil {
            startLocation.text = "\(place.coordinate.latitude), \(place.coordinate.longitude)"
            locationStart = CLLocation(latitude: place.coordinate.latitude, longitude: place.coordinate.longitude)
            createMarker(titleMarker: "Location Start", iconMarker: #imageLiteral(resourceName: "images"), latitude: place.coordinate.latitude, longitude: place.coordinate.longitude)
        } else {
            // handle the error by declaring default value
        }
    } else {
        if (self.locationManager.location?.coordinate.longitude) != nil {
            destinationLocation.text = "\(place.coordinate.latitude), \(place.coordinate.longitude)"
            locationEnd = CLLocation(latitude: place.coordinate.latitude, longitude: place.coordinate.longitude)
            createMarker(titleMarker: "Location End", iconMarker: #imageLiteral(resourceName: "images"), latitude: place.coordinate.latitude, longitude: place.coordinate.longitude)
        } else {
            // handle the error by declaring default value
        }
    }

    self.googleMaps.camera = camera
    self.dismiss(animated: true, completion: nil)
}


/// GMSAutocompleteViewControllerDelegate — user cancelled the search.
func wasCancelled(_ viewController: GMSAutocompleteViewController) {
    self.dismiss(animated: true, completion: nil)
}

/// Show the status-bar network spinner while predictions are loading.
func didRequestAutocompletePredictions(_ viewController: GMSAutocompleteViewController) {
    UIApplication.shared.isNetworkActivityIndicatorVisible = true
}

/// Hide the status-bar network spinner once predictions have arrived.
func didUpdateAutocompletePredictions(_ viewController: GMSAutocompleteViewController) {
    UIApplication.shared.isNetworkActivityIndicatorVisible = false
}
public func textFieldShouldReturn(_ textField: UITextField) -> Bool {
    textField.resignFirstResponder()
    return true
}

public extension UISearchBar {

/// Recolors the text of the UITextField embedded inside the search bar.
/// - Parameter color: the text color to apply.
public func setTextColor(color: UIColor) {
    // Walk one level of the subview tree looking for the embedded text field.
    let svs = subviews.flatMap { $0.subviews }
    guard let tf = (svs.filter { $0 is UITextField }).first as? UITextField else { return }
    tf.textColor = color
}
【讨论】:

它为cameraview显示黑色图像。感谢您的回答 我使用 quickblox 进行视频聊天,我想为用户提供一个功能,可以拍摄当前视频聊天的实时摄像头视图。【参考方案2】:

这是在 QuickBlox 通话期间截取正在进行的视频的工作代码

// Container that will hold the local video preview view.
@IBOutlet weak var stackView: UIStackView!

// Still-image output attached to QuickBlox's capture session so a frame can be
// grabbed mid-call. NOTE(review): AVCaptureStillImageOutput is deprecated since
// iOS 10 in favor of AVCapturePhotoOutput — TODO confirm deployment target.
let stillImageOutput = AVCaptureStillImageOutput()

/// Initializes QuickBlox WebRTC and starts the local camera capture.
override func viewDidLoad() {
    super.viewDidLoad()

    QBRTCClient.initializeRTC()
    QBRTCClient.instance().add(self)

    cofigureVideo()
}

/// Configures QuickBlox video (codec, timeouts, 640x480@30fps front camera)
/// and attaches a still-image output to the SAME capture session QuickBlox
/// uses, so a frame can be captured during the call.
/// NOTE(review): name keeps the original misspelling `cofigureVideo` because
/// callers (viewDidLoad) reference it by this name.
func cofigureVideo() {

    QBRTCConfig.mediaStreamConfiguration().videoCodec = .H264
    QBRTCConfig.setAnswerTimeInterval(30)
    QBRTCConfig.setStatsReportTimeInterval(5)

    // 640x480 @ 30 fps, 4:2:0 full-range pixel format.
    let videoFormat = QBRTCVideoFormat.init()
    videoFormat.frameRate = 30
    videoFormat.pixelFormat = .format420f
    videoFormat.width = 640
    videoFormat.height = 480

    self.videoCapture = QBRTCCameraCapture.init(videoFormat: videoFormat, position: .front)

    self.videoCapture.previewLayer.videoGravity = AVLayerVideoGravity.resizeAspectFill

    self.videoCapture.startSession {

        // Piggy-back a JPEG still-image output onto QuickBlox's session.
        self.stillImageOutput.outputSettings = [AVVideoCodecKey: AVVideoCodecJPEG]
        if self.videoCapture.captureSession.canAddOutput(self.stillImageOutput) {
            self.videoCapture.captureSession.addOutput(self.stillImageOutput)
        }

        let localView = LocalVideoView.init(withPreviewLayer: self.videoCapture.previewLayer)
        self.stackView.addArrangedSubview(localView)
    }
}

拍照按钮点击

/// Captures a still frame from the ongoing call's capture session.
/// NOTE(review): AVCaptureStillImageOutput and its jpeg representation API are
/// deprecated since iOS 10 — AVCapturePhotoOutput is the modern replacement.
@IBAction func TakePhotoTapped(_ sender: Any) {

    if let videoConnection = stillImageOutput.connection(with: AVMediaType.video) {
        stillImageOutput.captureStillImageAsynchronously(from: videoConnection) {
            (imageDataSampleBuffer, error) -> Void in
            // NOTE(review): force-unwraps crash if capture failed; check `error`
            // and unwrap the buffer safely in production code.
            let imageData = AVCaptureStillImageOutput.jpegStillImageNSDataRepresentation(imageDataSampleBuffer!)

            if let image = UIImage(data: imageData!) {
                // Your image is Here
                _ = image
            }
        }
    }
}
【讨论】:

以上是关于如何在 QuickBlox 通话期间截取正在进行的视频的主要内容,如果未能解决你的问题,请参考以下文章

Quickblox 语音通话/视频通话

Android 上的 Quickblox 视频通话

如何使用 QuickBlox 录制视频通话

Quickblox:对离线用户进行视频通话的策略是啥

QuickBlox Chat:无法进行视频通话

quickblox 接受视频聊天通话问题