// RealtimeOutgoingVideoSourceCocoa.cpp
#include "config.h"
#include "RealtimeOutgoingVideoSourceCocoa.h"
#if USE(LIBWEBRTC)
#include "Logging.h"
#include <webrtc/api/video/i420_buffer.h>
#include <webrtc/common_video/libyuv/include/webrtc_libyuv.h>
#include <webrtc/sdk/objc/Framework/Classes/Video/corevideo_frame_buffer.h>
#include <pal/cf/CoreMediaSoftLink.h>
#include "CoreVideoSoftLink.h"
namespace WebCore {
using namespace PAL;
Ref<RealtimeOutgoingVideoSource> RealtimeOutgoingVideoSource::create(Ref<MediaStreamTrackPrivate>&& videoSource)
{
    // On Cocoa platforms the base-class factory always builds the
    // CoreVideo-backed implementation.
    auto cocoaSource = RealtimeOutgoingVideoSourceCocoa::create(WTFMove(videoSource));
    return WTFMove(cocoaSource);
}
Ref<RealtimeOutgoingVideoSourceCocoa> RealtimeOutgoingVideoSourceCocoa::create(Ref<MediaStreamTrackPrivate>&& videoSource)
{
    // adoptRef takes ownership of the refcount established by the
    // constructor, so no extra ref/deref pair happens here.
    auto* rawSource = new RealtimeOutgoingVideoSourceCocoa(WTFMove(videoSource));
    return adoptRef(*rawSource);
}
// All state lives in the RealtimeOutgoingVideoSource base class; this
// constructor only forwards the track to it.
RealtimeOutgoingVideoSourceCocoa::RealtimeOutgoingVideoSourceCocoa(Ref<MediaStreamTrackPrivate>&& videoSource)
    : RealtimeOutgoingVideoSource(WTFMove(videoSource))
{
}
// Converts an incoming CoreMedia video sample into a libwebrtc frame buffer
// and pushes it to the registered sinks. YUV pixel buffers are wrapped
// zero-copy; RGB pixel buffers are converted to I420 through the buffer pool.
void RealtimeOutgoingVideoSourceCocoa::sampleBufferUpdated(MediaStreamTrackPrivate&, MediaSample& sample)
{
    // Nothing to do when no libwebrtc sink is attached.
    if (!m_sinks.size())
        return;

    // Drop frames while the track is muted or disabled.
    if (m_muted || !m_enabled)
        return;

#if !RELEASE_LOG_DISABLED
    // Log every 30th frame to keep release logging cheap.
    if (!(++m_numberOfFrames % 30))
        RELEASE_LOG(MediaStream, "RealtimeOutgoingVideoSourceCocoa::sampleBufferUpdated %zu frame", m_numberOfFrames);
#endif

    // Track the sample's rotation so it can be applied (or signaled) downstream.
    switch (sample.videoRotation()) {
    case MediaSample::VideoRotation::None:
        m_currentRotation = webrtc::kVideoRotation_0;
        break;
    case MediaSample::VideoRotation::UpsideDown:
        m_currentRotation = webrtc::kVideoRotation_180;
        break;
    case MediaSample::VideoRotation::Right:
        m_currentRotation = webrtc::kVideoRotation_90;
        break;
    case MediaSample::VideoRotation::Left:
        m_currentRotation = webrtc::kVideoRotation_270;
        break;
    }

    ASSERT(sample.platformSample().type == PlatformSample::CMSampleBufferType);
    auto pixelBuffer = static_cast<CVPixelBufferRef>(CMSampleBufferGetImageBuffer(sample.platformSample().sample.cmSampleBuffer));
    auto pixelFormatType = CVPixelBufferGetPixelFormatType(pixelBuffer);

    if (pixelFormatType == kCVPixelFormatType_420YpCbCr8Planar || pixelFormatType == kCVPixelFormatType_420YpCbCr8BiPlanarFullRange) {
        // Already YUV: wrap the pixel buffer without copying.
        rtc::scoped_refptr<webrtc::VideoFrameBuffer> buffer = new rtc::RefCountedObject<webrtc::CoreVideoFrameBuffer>(pixelBuffer);
        if (m_shouldApplyRotation && m_currentRotation != webrtc::kVideoRotation_0) {
            // Rotation requires an I420 copy of the frame.
            auto rotatedBuffer = buffer->ToI420();
            ASSERT(rotatedBuffer);
            buffer = webrtc::I420Buffer::Rotate(*rotatedBuffer, m_currentRotation);
        }
        sendFrame(WTFMove(buffer));
        return;
    }

    // RGB formats must be converted to I420 before handing off to libwebrtc.
    CVPixelBufferLockBaseAddress(pixelBuffer, 0);
    auto* source = reinterpret_cast<uint8_t*>(CVPixelBufferGetBaseAddressOfPlane(pixelBuffer, 0));

    ASSERT(m_width);
    ASSERT(m_height);
    auto newBuffer = m_bufferPool.CreateBuffer(m_width, m_height);
    ASSERT(newBuffer);
    if (!newBuffer) {
        // Release the base-address lock before bailing out; the early return
        // previously leaked the lock on this error path.
        CVPixelBufferUnlockBaseAddress(pixelBuffer, 0);
        RELEASE_LOG(WebRTC, "RealtimeOutgoingVideoSourceCocoa::sampleBufferUpdated unable to allocate buffer for conversion to YUV");
        return;
    }

    // Note: libyuv names formats by byte order in memory, which is reversed
    // relative to the CoreVideo constants (kCVPixelFormatType_32BGRA -> kARGB).
    if (pixelFormatType == kCVPixelFormatType_32BGRA)
        webrtc::ConvertToI420(webrtc::VideoType::kARGB, source, 0, 0, m_width, m_height, 0, webrtc::kVideoRotation_0, newBuffer);
    else {
        ASSERT(pixelFormatType == kCVPixelFormatType_32ARGB);
        webrtc::ConvertToI420(webrtc::VideoType::kBGRA, source, 0, 0, m_width, m_height, 0, webrtc::kVideoRotation_0, newBuffer);
    }
    CVPixelBufferUnlockBaseAddress(pixelBuffer, 0);

    if (m_shouldApplyRotation && m_currentRotation != webrtc::kVideoRotation_0)
        newBuffer = webrtc::I420Buffer::Rotate(*newBuffer, m_currentRotation);
    sendFrame(WTFMove(newBuffer));
}
}
#endif // USE(LIBWEBRTC)