You signed in with another tab or window. Reload to refresh your session.You signed out in another tab or window. Reload to refresh your session.You switched accounts on another tab or window. Reload to refresh your session.Dismiss alert
Hi, I have always liked using HaishinKit's camera directly, but in my project there is now a need to use AVCaptureDataOutputSynchronizerDelegate to manipulate CMSampleBuffers from a dual wide-angle camera stream, synced at the same time. First, let me post the sample I'm working on:
private class MultiCamModel: NSObject, ObservableObject, AVCaptureDataOutputSynchronizerDelegate,
    AVCaptureAudioDataOutputSampleBufferDelegate
{
    // MARK: - Capture plumbing

    private let sessionQueue = DispatchQueue(label: "com.demo.sessionQueue")
    var dualVideoSession = AVCaptureMultiCamSession()
    var synchronizer: AVCaptureDataOutputSynchronizer?
    let wideDataOutput = AVCaptureVideoDataOutput()
    let ultrawideDataOutput = AVCaptureVideoDataOutput()
    let audioDataOutput = AVCaptureAudioDataOutput()

    // MARK: - HaishinKit

    private var stream: RTMPStream?
    private let mixer = MediaMixer()
    private let connection = RTMPConnection()

    /// Builds the multi-cam session: dual-wide camera + microphone, added with
    /// manual (no-connection) wiring so the wide and ultrawide video outputs
    /// can be driven through an AVCaptureDataOutputSynchronizer.
    private func configureCaptureSession() {
        sessionQueue.async { [weak self] in
            guard let self else { return }
            do {
                LBLogger.with(kHaishinKitIdentifier).level = .trace
                dualVideoSession.beginConfiguration()
                stream = RTMPStream(connection: connection)
                Task {
                    // NOTE(review): appending buffers larger than the encoder's
                    // default 854x480 frame appears to require videoSize to match
                    // the composited buffer's dimensions — confirm against the
                    // HaishinKit version in use.
                    let settings = VideoCodecSettings(
                        videoSize: .init(width: 3840, height: 1080),
                        bitRate: 4 * 1000 * 1000)
                    await self.stream?.setVideoSettings(settings)
                }
                guard
                    let cameraDevice = AVCaptureDevice.default(
                        .builtInDualWideCamera, for: .video, position: .back),
                    let microphoneDevice = AVCaptureDevice.default(for: .audio)
                else {
                    print("no camera or microphone device")
                    return
                }
                let cameraInput = try AVCaptureDeviceInput(device: cameraDevice)
                let microphoneInput = try AVCaptureDeviceInput(device: microphoneDevice)
                guard
                    let widePort = cameraInput.ports(
                        for: .video, sourceDeviceType: .builtInWideAngleCamera,
                        sourceDevicePosition: .back
                    ).first,
                    let ultrawidePort = cameraInput.ports(
                        for: .video, sourceDeviceType: .builtInUltraWideCamera,
                        sourceDevicePosition: .back
                    ).first,
                    let audioPort = microphoneInput.ports(
                        for: .audio, sourceDeviceType: microphoneDevice.deviceType,
                        sourceDevicePosition: microphoneDevice.position
                    ).first
                else {
                    print("Could not obtain input ports")
                    return
                }
                // Manual wiring: inputs/outputs are added without implicit
                // connections, then one explicit AVCaptureConnection per stream.
                dualVideoSession.addInputWithNoConnections(cameraInput)
                dualVideoSession.addInputWithNoConnections(microphoneInput)
                dualVideoSession.addOutputWithNoConnections(wideDataOutput)
                dualVideoSession.addOutputWithNoConnections(ultrawideDataOutput)
                dualVideoSession.addOutputWithNoConnections(audioDataOutput)
                let wideAngleCameraConnection = AVCaptureConnection(
                    inputPorts: [widePort], output: wideDataOutput)
                let ultrawideAngleCameraConnection = AVCaptureConnection(
                    inputPorts: [ultrawidePort], output: ultrawideDataOutput)
                let microphoneConnection = AVCaptureConnection(
                    inputPorts: [audioPort], output: audioDataOutput)
                dualVideoSession.addConnection(wideAngleCameraConnection)
                dualVideoSession.addConnection(ultrawideAngleCameraConnection)
                dualVideoSession.addConnection(microphoneConnection)
                // Deliver wide + ultrawide frames as matched pairs.
                let synchronizer = AVCaptureDataOutputSynchronizer(dataOutputs: [
                    wideDataOutput, ultrawideDataOutput,
                ])
                synchronizer.setDelegate(self, queue: sessionQueue)  // video data
                self.synchronizer = synchronizer  // FIX: local binding, no force unwrap
                audioDataOutput.setSampleBufferDelegate(self, queue: sessionQueue)  // audio data
                dualVideoSession.commitConfiguration()
                dualVideoSession.startRunning()
            } catch {
                // FIX: don't silently swallow configuration failures.
                print("capture session configuration failed: \(error)")
            }
        }
    }

    // MARK: - AVCaptureDataOutputSynchronizerDelegate

    /// Receives time-matched wide/ultrawide frame pairs. Manipulate them here,
    /// then append the resulting buffer to the RTMP stream.
    func dataOutputSynchronizer(
        _ synchronizer: AVCaptureDataOutputSynchronizer,
        didOutput synchronizedDataCollection: AVCaptureSynchronizedDataCollection
    ) {
        // FIX: the original referenced an undefined `newSampleBuffer!`. Pull the
        // synchronized buffers out of the collection explicitly, and bail out
        // when either side of the pair was dropped by the capture pipeline.
        guard
            let wideData = synchronizedDataCollection.synchronizedData(for: wideDataOutput)
                as? AVCaptureSynchronizedSampleBufferData,
            let ultrawideData = synchronizedDataCollection.synchronizedData(for: ultrawideDataOutput)
                as? AVCaptureSynchronizedSampleBufferData,
            !wideData.sampleBufferWasDropped,
            !ultrawideData.sampleBufferWasDropped
        else { return }

        // TODO: composite wideData.sampleBuffer + ultrawideData.sampleBuffer
        // into a single 3840x1080 buffer here before appending.
        let newSampleBuffer = wideData.sampleBuffer  // placeholder until composition exists

        Task { await stream?.append(newSampleBuffer) }  // append to stream
    }

    // MARK: - AVCaptureAudioDataOutputSampleBufferDelegate

    /// Receives microphone sample buffers.
    func captureOutput(
        _ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer,
        from connection: AVCaptureConnection
    ) {
        // NOTE(review): assumes RTMPStream.append(_:) also accepts audio
        // CMSampleBuffers — confirm against the HaishinKit version in use
        // (2.x routes media through MediaMixer instead).
        Task { await stream?.append(sampleBuffer) }
    }
}
So now I have a few questions: 1. How do I set videoSize in VideoCodecSettings without causing this error?
reacted with thumbs up emoji reacted with thumbs down emoji reacted with laugh emoji reacted with hooray emoji reacted with confused emoji reacted with heart emoji reacted with rocket emoji reacted with eyes emoji
-
Hi, I always like using
HaishinKit
's camera directly, but in my project there is a sudden need to use AVCaptureDataOutputSynchronizerDelegate
to manipulate CMSampleBuffer
from a dual wide-angle camera stream synced at the same time. First let me post the sample I'm working on. So now I have a few questions:
1. How to set videoSize in
VideoCodecSettings
without causing this error? 2. How do I append audio to the stream? I tried this code
but it causes this error:
Beta Was this translation helpful? Give feedback.
All reactions