// VideoSinkGStreamer.cpp
#include "config.h"
#include "VideoSinkGStreamer.h"
#if ENABLE(VIDEO) && USE(GSTREAMER)
#include "GRefPtrGStreamer.h"
#include "GStreamerUtilities.h"
#include "IntSize.h"
#include <glib.h>
#include <gst/gst.h>
#include <gst/video/gstvideometa.h>
#include <wtf/Condition.h>
#include <wtf/RunLoop.h>
using namespace WebCore;
#if G_BYTE_ORDER == G_LITTLE_ENDIAN
#define GST_CAPS_FORMAT "{ BGRx, BGRA }"
#else
#define GST_CAPS_FORMAT "{ xRGB, ARGB }"
#endif
#if GST_CHECK_VERSION(1, 1, 0)
#define GST_FEATURED_CAPS GST_VIDEO_CAPS_MAKE_WITH_FEATURES(GST_CAPS_FEATURE_META_GST_VIDEO_GL_TEXTURE_UPLOAD_META, GST_CAPS_FORMAT) ";"
#else
#define GST_FEATURED_CAPS
#endif
#define WEBKIT_VIDEO_SINK_PAD_CAPS GST_FEATURED_CAPS GST_VIDEO_CAPS_MAKE(GST_CAPS_FORMAT)
static GstStaticPadTemplate s_sinkTemplate = GST_STATIC_PAD_TEMPLATE("sink", GST_PAD_SINK, GST_PAD_ALWAYS, GST_STATIC_CAPS(WEBKIT_VIDEO_SINK_PAD_CAPS));
GST_DEBUG_CATEGORY_STATIC(webkitVideoSinkDebug);
#define GST_CAT_DEFAULT webkitVideoSinkDebug
// Signals emitted by the sink. REPAINT_REQUESTED fires with the current
// GstSample each time a new frame should be painted.
enum {
REPAINT_REQUESTED,
LAST_SIGNAL
};
// Signal IDs, populated in webkit_video_sink_class_init().
static guint webkitVideoSinkSignals[LAST_SIGNAL] = { 0, };
// Forward declarations; definitions follow the scheduler class below.
static void webkitVideoSinkRepaintRequested(WebKitVideoSink*, GstSample*);
static GRefPtr<GstSample> webkitVideoSinkRequestRender(WebKitVideoSink*, GstBuffer*);
// Marshals frames from the GStreamer streaming thread to the repaint signal.
// With COORDINATED_GRAPHICS_THREADED the signal is emitted directly from the
// streaming thread; otherwise requestRender() schedules render() on the main
// RunLoop and blocks the streaming thread until it has run (or stop() aborts).
class VideoRenderRequestScheduler {
public:
VideoRenderRequestScheduler()
#if !USE(COORDINATED_GRAPHICS_THREADED)
: m_timer(RunLoop::main(), this, &VideoRenderRequestScheduler::render)
#endif
{
#if PLATFORM(GTK) && !USE(COORDINATED_GRAPHICS_THREADED)
// Bump the timer priority above default idle handling — presumably so
// frame dispatch is not starved by other idle work; confirm against the
// GTK port's main-loop priorities.
m_timer.setPriority(G_PRIORITY_HIGH_IDLE + 19);
#endif
}
// Re-enables rendering after a stop()/unlock cycle.
void start()
{
LockHolder locker(m_sampleMutex);
m_unlocked = false;
}
// Drops any pending sample and unblocks a streaming thread waiting in
// requestRender(). Called from unlock/stop vfuncs, i.e. during flush/teardown.
void stop()
{
LockHolder locker(m_sampleMutex);
m_sample = nullptr;
m_unlocked = true;
#if !USE(COORDINATED_GRAPHICS_THREADED)
m_timer.stop();
m_dataCondition.notifyOne();
#endif
}
// Called on the streaming thread for every buffer. Returns false only when
// sample creation failed; returning true while unlocked silently drops the
// frame so the pipeline keeps flowing during flush.
bool requestRender(WebKitVideoSink* sink, GstBuffer* buffer)
{
LockHolder locker(m_sampleMutex);
if (m_unlocked)
return true;
m_sample = webkitVideoSinkRequestRender(sink, buffer);
if (!m_sample)
return false;
#if USE(COORDINATED_GRAPHICS_THREADED)
if (LIKELY(GST_IS_SAMPLE(m_sample.get())))
webkitVideoSinkRepaintRequested(sink, m_sample.get());
m_sample = nullptr;
#else
m_sink = sink;
m_timer.startOneShot(0);
// Block until render() (or stop()) signals; wait() releases m_sampleMutex
// while waiting, letting the main thread take it.
m_dataCondition.wait(m_sampleMutex);
#endif
return true;
}
private:
#if !USE(COORDINATED_GRAPHICS_THREADED)
// Runs on the main RunLoop: emits the repaint signal, then wakes the
// streaming thread parked in requestRender().
void render()
{
LockHolder locker(m_sampleMutex);
GRefPtr<GstSample> sample = WTFMove(m_sample);
GRefPtr<WebKitVideoSink> sink = WTFMove(m_sink);
if (sample && !m_unlocked && LIKELY(GST_IS_SAMPLE(sample.get())))
webkitVideoSinkRepaintRequested(sink.get(), sample.get());
m_dataCondition.notifyOne();
}
#endif
// Guards m_sample, m_sink and m_unlocked across both threads.
Lock m_sampleMutex;
GRefPtr<GstSample> m_sample;
#if !USE(COORDINATED_GRAPHICS_THREADED)
RunLoop::Timer<VideoRenderRequestScheduler> m_timer;
Condition m_dataCondition;
GRefPtr<WebKitVideoSink> m_sink;
#endif
// True between stop() and start(): frames are dropped instead of dispatched.
bool m_unlocked { false };
};
// Instance-private data for WebKitVideoSink. The storage is allocated by
// GObject; the C++ object is constructed with placement new in
// webkit_video_sink_init() and destroyed manually in webkitVideoSinkFinalize().
struct _WebKitVideoSinkPrivate {
    _WebKitVideoSinkPrivate()
    {
        gst_video_info_init(&info);
    }

    ~_WebKitVideoSinkPrivate()
    {
        if (currentCaps)
            gst_caps_unref(currentCaps);
    }

    VideoRenderRequestScheduler scheduler; // Hands frames to the repaint signal.
    GstVideoInfo info; // Negotiated video format (set_caps / propose_allocation).
    // Owned reference, replaced via gst_caps_replace(). Explicitly initialized:
    // the previous code left this member uninitialized and relied on GObject
    // zero-filling the instance storage before placement new ran.
    GstCaps* currentCaps { nullptr };
};
#define webkit_video_sink_parent_class parent_class
// Registers the WebKitVideoSink GType (deriving from GstVideoSink) and
// initializes the "webkitsink" debug category as part of type registration.
G_DEFINE_TYPE_WITH_CODE(WebKitVideoSink, webkit_video_sink, GST_TYPE_VIDEO_SINK, GST_DEBUG_CATEGORY_INIT(webkitVideoSinkDebug, "webkitsink", 0, "webkit video sink"));
// GObject instance-init: constructs the C++ private struct and disables
// basesink's last-sample bookkeeping (we manage our own sample references).
static void webkit_video_sink_init(WebKitVideoSink* sink)
{
    sink->priv = G_TYPE_INSTANCE_GET_PRIVATE(sink, WEBKIT_TYPE_VIDEO_SINK, WebKitVideoSinkPrivate);

    // Construct the private struct before anything else uses the instance:
    // GObject only zero-fills the storage, it does not run C++ constructors.
    // Previously g_object_set() ran while priv pointed at unconstructed memory.
    new (sink->priv) WebKitVideoSinkPrivate();

    g_object_set(GST_BASE_SINK(sink), "enable-last-sample", FALSE, NULL);
}
// Emits the "repaint-requested" signal with the given sample. May run on the
// streaming thread or the main thread depending on the scheduler build mode.
static void webkitVideoSinkRepaintRequested(WebKitVideoSink* sink, GstSample* sample)
{
g_signal_emit(sink, webkitVideoSinkSignals[REPAINT_REQUESTED], 0, sample);
}
// Wraps the incoming buffer in a GstSample carrying the current caps. On
// non-GL-texture-mapper paths, alpha formats (ARGB/BGRA) are copied into a new
// buffer with the color channels premultiplied by alpha — presumably because
// the downstream painting path expects premultiplied pixels; confirm against
// the ImageGStreamer consumers. Returns nullptr on unknown format or failure.
static GRefPtr<GstSample> webkitVideoSinkRequestRender(WebKitVideoSink* sink, GstBuffer* buffer)
{
WebKitVideoSinkPrivate* priv = sink->priv;
GRefPtr<GstSample> sample = adoptGRef(gst_sample_new(buffer, priv->currentCaps, nullptr, nullptr));
// NOTE(review): the sample above is created before this format check, so an
// unknown format still pays for one gst_sample_new/unref round trip.
GstVideoFormat format = GST_VIDEO_INFO_FORMAT(&priv->info);
if (format == GST_VIDEO_FORMAT_UNKNOWN)
return nullptr;
#if !(USE(TEXTURE_MAPPER_GL) && !USE(COORDINATED_GRAPHICS))
if (format == GST_VIDEO_FORMAT_ARGB || format == GST_VIDEO_FORMAT_BGRA) {
// Copy into a fresh buffer so the original (possibly pooled) buffer is
// not mutated in place.
GstBuffer* newBuffer = WebCore::createGstBuffer(buffer);
if (UNLIKELY(!newBuffer))
return nullptr;
GstVideoFrame sourceFrame;
GstVideoFrame destinationFrame;
if (!gst_video_frame_map(&sourceFrame, &priv->info, buffer, GST_MAP_READ)) {
gst_buffer_unref(newBuffer);
return nullptr;
}
if (!gst_video_frame_map(&destinationFrame, &priv->info, newBuffer, GST_MAP_WRITE)) {
gst_video_frame_unmap(&sourceFrame);
gst_buffer_unref(newBuffer);
return nullptr;
}
const guint8* source = static_cast<guint8*>(GST_VIDEO_FRAME_PLANE_DATA(&sourceFrame, 0));
guint8* destination = static_cast<guint8*>(GST_VIDEO_FRAME_PLANE_DATA(&destinationFrame, 0));
// Premultiply each pixel: channel = channel * alpha / 255, with +128 for
// rounding. Note: x iterates rows (height) and y columns (width).
// NOTE(review): the pointers advance 4 bytes per pixel with no per-row
// stride adjustment, i.e. this assumes both frames are tightly packed
// (stride == width * 4) — TODO confirm against the negotiated caps.
for (int x = 0; x < GST_VIDEO_FRAME_HEIGHT(&sourceFrame); x++) {
for (int y = 0; y < GST_VIDEO_FRAME_WIDTH(&sourceFrame); y++) {
#if G_BYTE_ORDER == G_LITTLE_ENDIAN
// BGRA in memory: alpha is the last byte.
unsigned short alpha = source[3];
destination[0] = (source[0] * alpha + 128) / 255;
destination[1] = (source[1] * alpha + 128) / 255;
destination[2] = (source[2] * alpha + 128) / 255;
destination[3] = alpha;
#else
// ARGB in memory: alpha is the first byte.
unsigned short alpha = source[0];
destination[0] = alpha;
destination[1] = (source[1] * alpha + 128) / 255;
destination[2] = (source[2] * alpha + 128) / 255;
destination[3] = (source[3] * alpha + 128) / 255;
#endif
source += 4;
destination += 4;
}
}
gst_video_frame_unmap(&sourceFrame);
gst_video_frame_unmap(&destinationFrame);
// Re-wrap the premultiplied copy; the sample takes its own reference, so
// drop ours right after.
sample = adoptGRef(gst_sample_new(newBuffer, priv->currentCaps, nullptr, nullptr));
gst_buffer_unref(newBuffer);
}
#endif
return sample;
}
// GstBaseSink render/preroll vfunc: forwards the buffer to the scheduler and
// maps its success/failure onto GStreamer flow returns.
static GstFlowReturn webkitVideoSinkRender(GstBaseSink* baseSink, GstBuffer* buffer)
{
    WebKitVideoSink* videoSink = WEBKIT_VIDEO_SINK(baseSink);
    if (!videoSink->priv->scheduler.requestRender(videoSink, buffer))
        return GST_FLOW_ERROR;
    return GST_FLOW_OK;
}
// GObject finalize: manually runs the private struct's destructor (it was
// built with placement new over GObject-owned storage), then chains up.
static void webkitVideoSinkFinalize(GObject* object)
{
WEBKIT_VIDEO_SINK(object)->priv->~WebKitVideoSinkPrivate();
G_OBJECT_CLASS(parent_class)->finalize(object);
}
// GstBaseSink unlock vfunc: aborts any in-flight render wait so the streaming
// thread can leave blocking calls (e.g. during a flush), then chains up.
static gboolean webkitVideoSinkUnlock(GstBaseSink* baseSink)
{
    WEBKIT_VIDEO_SINK(baseSink)->priv->scheduler.stop();

    return GST_CALL_PARENT_WITH_DEFAULT(GST_BASE_SINK_CLASS, unlock, (baseSink), TRUE);
}
// GstBaseSink unlock_stop vfunc: re-arms the scheduler after an unlock so
// subsequent buffers are rendered again, then chains up.
static gboolean webkitVideoSinkUnlockStop(GstBaseSink* baseSink)
{
    WEBKIT_VIDEO_SINK(baseSink)->priv->scheduler.start();

    return GST_CALL_PARENT_WITH_DEFAULT(GST_BASE_SINK_CLASS, unlock_stop, (baseSink), TRUE);
}
// GstBaseSink stop vfunc: halts frame dispatch and releases the cached caps.
static gboolean webkitVideoSinkStop(GstBaseSink* baseSink)
{
    WebKitVideoSinkPrivate* priv = WEBKIT_VIDEO_SINK(baseSink)->priv;

    priv->scheduler.stop();
    // Unref and null out currentCaps in one step (no-op when already null).
    g_clear_pointer(&priv->currentCaps, gst_caps_unref);

    return TRUE;
}
// GstBaseSink start vfunc: enables the scheduler so frames get dispatched.
static gboolean webkitVideoSinkStart(GstBaseSink* baseSink)
{
    WEBKIT_VIDEO_SINK(baseSink)->priv->scheduler.start();
    return TRUE;
}
// GstBaseSink set_caps vfunc: validates the caps, caches the parsed video
// info, and keeps a reference to the caps for wrapping outgoing samples.
static gboolean webkitVideoSinkSetCaps(GstBaseSink* baseSink, GstCaps* caps)
{
    WebKitVideoSink* videoSink = WEBKIT_VIDEO_SINK(baseSink);
    WebKitVideoSinkPrivate* priv = videoSink->priv;

    GST_DEBUG_OBJECT(videoSink, "Current caps %" GST_PTR_FORMAT ", setting caps %" GST_PTR_FORMAT, priv->currentCaps, caps);

    // Parse into a local first so priv->info is untouched on failure.
    GstVideoInfo newInfo;
    gst_video_info_init(&newInfo);
    if (!gst_video_info_from_caps(&newInfo, caps)) {
        GST_ERROR_OBJECT(videoSink, "Invalid caps %" GST_PTR_FORMAT, caps);
        return FALSE;
    }

    priv->info = newInfo;
    gst_caps_replace(&priv->currentCaps, caps);
    return TRUE;
}
// GstBaseSink propose_allocation vfunc: records the video info from the
// queried caps and advertises the buffer metas upstream may attach.
static gboolean webkitVideoSinkProposeAllocation(GstBaseSink* baseSink, GstQuery* query)
{
    WebKitVideoSink* videoSink = WEBKIT_VIDEO_SINK(baseSink);

    GstCaps* caps = nullptr;
    gst_query_parse_allocation(query, &caps, nullptr);
    if (!caps)
        return FALSE;

    if (!gst_video_info_from_caps(&videoSink->priv->info, caps))
        return FALSE;

    gst_query_add_allocation_meta(query, GST_VIDEO_META_API_TYPE, nullptr);
    gst_query_add_allocation_meta(query, GST_VIDEO_CROP_META_API_TYPE, nullptr);
#if GST_CHECK_VERSION(1, 1, 0)
    gst_query_add_allocation_meta(query, GST_VIDEO_GL_TEXTURE_UPLOAD_META_API_TYPE, nullptr);
#endif
    return TRUE;
}
// GObject class-init: installs the pad template and metadata, wires up the
// GstBaseSink vtable, registers the private struct size, and creates the
// "repaint-requested" action signal.
static void webkit_video_sink_class_init(WebKitVideoSinkClass* klass)
{
    // Element-level setup: pad template and element metadata.
    GstElementClass* elementClass = GST_ELEMENT_CLASS(klass);
    gst_element_class_add_pad_template(elementClass, gst_static_pad_template_get(&s_sinkTemplate));
    gst_element_class_set_metadata(elementClass, "WebKit video sink", "Sink/Video", "Sends video data from a GStreamer pipeline to WebKit", "Igalia, Alp Toker <alp@atoker.com>");

    g_type_class_add_private(klass, sizeof(WebKitVideoSinkPrivate));

    G_OBJECT_CLASS(klass)->finalize = webkitVideoSinkFinalize;

    // Base-sink vtable: lifecycle, flushing, rendering and caps negotiation.
    GstBaseSinkClass* baseSinkClass = GST_BASE_SINK_CLASS(klass);
    baseSinkClass->start = webkitVideoSinkStart;
    baseSinkClass->stop = webkitVideoSinkStop;
    baseSinkClass->unlock = webkitVideoSinkUnlock;
    baseSinkClass->unlock_stop = webkitVideoSinkUnlockStop;
    baseSinkClass->render = webkitVideoSinkRender;
    baseSinkClass->preroll = webkitVideoSinkRender;
    baseSinkClass->set_caps = webkitVideoSinkSetCaps;
    baseSinkClass->propose_allocation = webkitVideoSinkProposeAllocation;

    // Emitted (possibly from the streaming thread) with the sample to paint.
    webkitVideoSinkSignals[REPAINT_REQUESTED] = g_signal_new("repaint-requested",
        G_TYPE_FROM_CLASS(klass),
        static_cast<GSignalFlags>(G_SIGNAL_RUN_LAST | G_SIGNAL_ACTION),
        0, 0, 0, g_cclosure_marshal_generic,
        G_TYPE_NONE, 1, GST_TYPE_SAMPLE);
}
// Creates a floating-reference WebKitVideoSink instance as a GstElement.
GstElement* webkitVideoSinkNew()
{
    // g_object_new() is variadic and must be terminated with a pointer-sized
    // NULL; the int literal 0 previously used here is not guaranteed to be
    // pointer-sized when passed through varargs.
    return GST_ELEMENT(g_object_new(WEBKIT_TYPE_VIDEO_SINK, nullptr));
}
#endif // ENABLE(VIDEO) && USE(GSTREAMER)