// MediaPlayerPrivateGStreamerBase.cpp
#include "config.h"
#include "MediaPlayerPrivateGStreamerBase.h"
#if ENABLE(VIDEO) && USE(GSTREAMER)
#include "ColorSpace.h"
#include "GStreamerUtilities.h"
#include "GraphicsContext.h"
#include "GraphicsTypes.h"
#include "ImageGStreamer.h"
#include "ImageOrientation.h"
#include "IntRect.h"
#include "MediaPlayer.h"
#include "NotImplemented.h"
#include "VideoSinkGStreamer.h"
#include "WebKitWebSourceGStreamer.h"
#include <cstdlib>
#include <gst/audio/streamvolume.h>
#include <gst/gst.h>
#include <gst/video/gstvideometa.h>
#include <wtf/glib/GMutexLocker.h>
#include <wtf/text/CString.h>
#if USE(GSTREAMER_GL)
#define GST_USE_UNSTABLE_API
#include <gst/gl/gstglmemory.h>
#undef GST_USE_UNSTABLE_API
#endif
#if GST_CHECK_VERSION(1, 1, 0) && USE(TEXTURE_MAPPER_GL)
#include "BitmapTextureGL.h"
#include "BitmapTexturePool.h"
#include "TextureMapperGL.h"
#endif
#if USE(GSTREAMER_GL)
#include "GLContext.h"
#define GST_USE_UNSTABLE_API
#include <gst/gl/gl.h>
#undef GST_USE_UNSTABLE_API
#if USE(GLX)
#include "GLContextGLX.h"
#include <gst/gl/x11/gstgldisplay_x11.h>
#elif USE(EGL)
#include "GLContextEGL.h"
#include <gst/gl/egl/gstgldisplay_egl.h>
#endif
#if PLATFORM(X11)
#include "PlatformDisplayX11.h"
#elif PLATFORM(WAYLAND)
#include "PlatformDisplayWayland.h"
#endif
#if PLATFORM(X11) && GST_GL_HAVE_PLATFORM_EGL
#undef None
#endif
#endif // USE(GSTREAMER_GL)
// Defines the GStreamer debug category used by the media-player logging
// macros in this file; GST_CAT_DEFAULT routes GST_* log calls to it.
GST_DEBUG_CATEGORY(webkit_media_player_debug);
#define GST_CAT_DEFAULT webkit_media_player_debug

// NOTE(review): file-scope using-directive; no unqualified std names are
// visible in this file — consider removing. TODO confirm nothing relies on it.
using namespace std;
namespace WebCore {
// Returns the greatest common divisor of |a| and |b| (Euclid's algorithm).
// The result is non-negative even for negative inputs; gcd(0, 0) is 0.
static int greatestCommonDivisor(int a, int b)
{
    while (b) {
        int temp = a;
        a = b;
        b = temp % b;
    }
    // Prefer std::abs over GLib's ABS macro (macro-free, type-checked).
    return std::abs(a);
}
static void mediaPlayerPrivateVolumeChangedCallback(GObject*, GParamSpec*, MediaPlayerPrivateGStreamerBase* player)
{
LOG_MEDIA_MESSAGE("Volume changed to: %f", player->volume());
player->volumeChanged();
}
static void mediaPlayerPrivateMuteChangedCallback(GObject*, GParamSpec*, MediaPlayerPrivateGStreamerBase* player)
{
player->muteChanged();
}
static void mediaPlayerPrivateRepaintCallback(WebKitVideoSink*, GstSample* sample, MediaPlayerPrivateGStreamerBase* playerPrivate)
{
playerPrivate->triggerRepaint(sample);
}
#if USE(GSTREAMER_GL)
// "client-draw" handler for glimagesink: hands the sample to the player
// private. Always reports the draw as handled.
static gboolean mediaPlayerPrivateDrawCallback(GstElement*, GstContext*, GstSample* sample, MediaPlayerPrivateGStreamerBase* player)
{
    player->triggerRepaint(sample);
    return TRUE;
}
#endif
static void mediaPlayerPrivateNeedContextMessageCallback(GstBus*, GstMessage* message, MediaPlayerPrivateGStreamerBase* player)
{
player->handleNeedContextMessage(message);
}
// Initializes the shared GStreamer player state: notification defaults and
// the synchronization primitives used to hand samples between threads.
MediaPlayerPrivateGStreamerBase::MediaPlayerPrivateGStreamerBase(MediaPlayer* player)
    : m_player(player)
    , m_fpsSink(0)
    , m_readyState(MediaPlayer::HaveNothing)
    , m_networkState(MediaPlayer::Empty)
    , m_repaintHandler(0)
    , m_volumeSignalHandler(0)
    , m_muteSignalHandler(0)
    , m_usingFallbackVideoSink(false)
{
    // Protects m_sample, which triggerRepaint() writes and the painting
    // methods read.
    g_mutex_init(&m_sampleMutex);
#if USE(GSTREAMER_GL)
    // Used by triggerRepaint() to block the streaming thread until the
    // scheduled main-thread draw task has run.
    g_cond_init(&m_drawCondition);
    g_mutex_init(&m_drawMutex);
#endif
}
// Tears down signal connections and synchronization primitives. The repaint
// handler is disconnected first so no further samples are delivered while the
// object is being destroyed.
MediaPlayerPrivateGStreamerBase::~MediaPlayerPrivateGStreamerBase()
{
    if (m_repaintHandler) {
        g_signal_handler_disconnect(m_videoSink.get(), m_repaintHandler);
        m_repaintHandler = 0;
    }

    g_mutex_clear(&m_sampleMutex);

    m_player = 0;

    // Both volume and mute notifications were connected on m_volumeElement in
    // setStreamVolumeElement().
    if (m_volumeSignalHandler) {
        g_signal_handler_disconnect(m_volumeElement.get(), m_volumeSignalHandler);
        m_volumeSignalHandler = 0;
    }

    if (m_muteSignalHandler) {
        g_signal_handler_disconnect(m_volumeElement.get(), m_muteSignalHandler);
        m_muteSignalHandler = 0;
    }

#if USE(GSTREAMER_GL)
    g_cond_clear(&m_drawCondition);
    g_mutex_clear(&m_drawMutex);
#endif

    // Undo the bus setup from setPipeline() before releasing the pipeline.
    if (m_pipeline) {
        GRefPtr<GstBus> bus = adoptGRef(gst_pipeline_get_bus(GST_PIPELINE(m_pipeline.get())));
        ASSERT(bus);
        g_signal_handlers_disconnect_by_func(bus.get(), reinterpret_cast<gpointer>(mediaPlayerPrivateNeedContextMessageCallback), this);
        gst_bus_disable_sync_message_emission(bus.get());
        m_pipeline.clear();
    }

#if USE(TEXTURE_MAPPER_GL) && !USE(COORDINATED_GRAPHICS)
    // Let the compositing client drop any reference to this platform layer.
    if (client())
        client()->platformLayerWillBeDestroyed();
#endif
}
// Adopts the playback pipeline and subscribes to synchronous "need-context"
// bus messages so GL contexts can be supplied from the posting thread.
void MediaPlayerPrivateGStreamerBase::setPipeline(GstElement* pipeline)
{
    m_pipeline = pipeline;

    GRefPtr<GstBus> messageBus = adoptGRef(gst_pipeline_get_bus(GST_PIPELINE(m_pipeline.get())));
    gst_bus_enable_sync_message_emission(messageBus.get());
    g_signal_connect(messageBus.get(), "sync-message::need-context", G_CALLBACK(mediaPlayerPrivateNeedContextMessageCallback), this);
}
// Answers "need-context" bus messages by handing the requesting element either
// the shared GstGLDisplay or the wrapped application GL context.
void MediaPlayerPrivateGStreamerBase::handleNeedContextMessage(GstMessage* message)
{
#if USE(GSTREAMER_GL)
    const gchar* contextType;
    gst_message_parse_context_type(message, &contextType);

    if (!ensureGstGLContext())
        return;

    if (!g_strcmp0(contextType, GST_GL_DISPLAY_CONTEXT_TYPE)) {
        GstContext* displayContext = gst_context_new(GST_GL_DISPLAY_CONTEXT_TYPE, TRUE);
        gst_context_set_gl_display(displayContext, m_glDisplay.get());
        gst_element_set_context(GST_ELEMENT(message->src), displayContext);
        // gst_element_set_context() takes its own reference; release ours so
        // the context is not leaked.
        gst_context_unref(displayContext);
        return;
    }

    if (!g_strcmp0(contextType, "gst.gl.app_context")) {
        GstContext* appContext = gst_context_new("gst.gl.app_context", TRUE);
        GstStructure* structure = gst_context_writable_structure(appContext);
        gst_structure_set(structure, "context", GST_GL_TYPE_CONTEXT, m_glContext.get(), nullptr);
        gst_element_set_context(GST_ELEMENT(message->src), appContext);
        // Same as above: drop our reference after handing the context over.
        gst_context_unref(appContext);
        return;
    }
#else
    UNUSED_PARAM(message);
#endif // USE(GSTREAMER_GL)
}
#if USE(GSTREAMER_GL)
// Lazily creates a GstGLContext that wraps WebKit's sharing GL context so
// GStreamer GL elements can interoperate with the compositor's GL resources.
// Returns false when the native context handle is not available yet.
bool MediaPlayerPrivateGStreamerBase::ensureGstGLContext()
{
    if (m_glContext)
        return true;

    // Wrap the process-wide native display once (X11 display or Wayland EGL
    // display, depending on the platform).
    if (!m_glDisplay) {
        const auto& sharedDisplay = PlatformDisplay::sharedDisplay();
#if PLATFORM(X11)
        m_glDisplay = GST_GL_DISPLAY(gst_gl_display_x11_new_with_display(downcast<PlatformDisplayX11>(sharedDisplay).native()));
#elif PLATFORM(WAYLAND)
        m_glDisplay = GST_GL_DISPLAY(gst_gl_display_egl_new_with_egl_display(downcast<PlatformDisplayWayland>(sharedDisplay).native()));
#endif
    }

    GLContext* webkitContext = GLContext::sharingContext();
    // Anything other than EGL is assumed to be GLX here.
    GstGLPlatform glPlatform = webkitContext->isEGLContext() ? GST_GL_PLATFORM_EGL : GST_GL_PLATFORM_GLX;
#if USE(OPENGL_ES_2)
    GstGLAPI glAPI = GST_GL_API_GLES2;
#elif USE(OPENGL)
    GstGLAPI glAPI = GST_GL_API_OPENGL;
#else
    ASSERT_NOT_REACHED();
#endif

    PlatformGraphicsContext3D contextHandle = webkitContext->platformContext();
    if (!contextHandle)
        return false;

    // "Wrapped" means GStreamer borrows the existing context rather than
    // creating/owning a new one.
    m_glContext = gst_gl_context_new_wrapped(m_glDisplay.get(), reinterpret_cast<guintptr>(contextHandle), glPlatform, glAPI);

    return true;
}
#endif // USE(GSTREAMER_GL)
// Computes the video's natural (display) size from the current sample's caps,
// correcting the coded size by the pixel aspect ratio. The result is cached
// in m_videoSize until invalidated elsewhere.
FloatSize MediaPlayerPrivateGStreamerBase::naturalSize() const
{
    if (!hasVideo())
        return FloatSize();

    if (!m_videoSize.isEmpty())
        return m_videoSize;

    WTF::GMutexLocker<GMutex> lock(m_sampleMutex);
    if (!GST_IS_SAMPLE(m_sample.get()))
        return FloatSize();

    GstCaps* caps = gst_sample_get_caps(m_sample.get());
    if (!caps)
        return FloatSize();

    int pixelAspectRatioNumerator, pixelAspectRatioDenominator, stride;
    IntSize originalSize;
    GstVideoFormat format;
    if (!getVideoSizeAndFormatFromCaps(caps, originalSize, format, pixelAspectRatioNumerator, pixelAspectRatioDenominator, stride))
        return FloatSize();

    LOG_MEDIA_MESSAGE("Original video size: %dx%d", originalSize.width(), originalSize.height());
    LOG_MEDIA_MESSAGE("Pixel aspect ratio: %d/%d", pixelAspectRatioNumerator, pixelAspectRatioDenominator);

    // Reduce the display aspect ratio (coded size scaled by the PAR) to its
    // lowest terms.
    int displayWidth = originalSize.width() * pixelAspectRatioNumerator;
    int displayHeight = originalSize.height() * pixelAspectRatioDenominator;
    int displayAspectRatioGCD = greatestCommonDivisor(displayWidth, displayHeight);
    displayWidth /= displayAspectRatioGCD;
    displayHeight /= displayAspectRatioGCD;

    // Prefer keeping whichever coded dimension divides evenly by the reduced
    // aspect ratio; otherwise approximate while keeping the coded height.
    guint64 width = 0, height = 0;
    if (!(originalSize.height() % displayHeight)) {
        LOG_MEDIA_MESSAGE("Keeping video original height");
        width = gst_util_uint64_scale_int(originalSize.height(), displayWidth, displayHeight);
        height = static_cast<guint64>(originalSize.height());
    } else if (!(originalSize.width() % displayWidth)) {
        LOG_MEDIA_MESSAGE("Keeping video original width");
        height = gst_util_uint64_scale_int(originalSize.width(), displayHeight, displayWidth);
        width = static_cast<guint64>(originalSize.width());
    } else {
        LOG_MEDIA_MESSAGE("Approximating while keeping original video height");
        width = gst_util_uint64_scale_int(originalSize.height(), displayWidth, displayHeight);
        height = static_cast<guint64>(originalSize.height());
    }

    LOG_MEDIA_MESSAGE("Natural size: %" G_GUINT64_FORMAT "x%" G_GUINT64_FORMAT, width, height);
    m_videoSize = FloatSize(static_cast<int>(width), static_cast<int>(height));
    return m_videoSize;
}
// Pushes |volume| to the pipeline's stream-volume element (cubic scale).
// Does nothing until setStreamVolumeElement() has been called.
void MediaPlayerPrivateGStreamerBase::setVolume(float volume)
{
    if (m_volumeElement) {
        LOG_MEDIA_MESSAGE("Setting volume: %f", volume);
        gst_stream_volume_set_volume(m_volumeElement.get(), GST_STREAM_VOLUME_FORMAT_CUBIC, static_cast<double>(volume));
    }
}
// Reads the current stream volume (cubic scale), or 0 when no volume element
// has been set yet.
float MediaPlayerPrivateGStreamerBase::volume() const
{
    return m_volumeElement ? gst_stream_volume_get_volume(m_volumeElement.get(), GST_STREAM_VOLUME_FORMAT_CUBIC) : 0;
}
// Main-thread side of a volume notification: reads the element's volume,
// clamps it into [0, 1] and reports it to the MediaPlayer.
void MediaPlayerPrivateGStreamerBase::notifyPlayerOfVolumeChange()
{
    if (!m_player || !m_volumeElement)
        return;

    double volume = gst_stream_volume_get_volume(m_volumeElement.get(), GST_STREAM_VOLUME_FORMAT_CUBIC);
    // The volume can exceed 1.0 on the element; MediaPlayer expects [0, 1].
    volume = CLAMP(volume, 0.0, 1.0);
    m_player->volumeChanged(static_cast<float>(volume));
}
void MediaPlayerPrivateGStreamerBase::volumeChanged()
{
m_volumeTimerHandler.schedule("[WebKit] MediaPlayerPrivateGStreamerBase::volumeChanged", std::function<void()>(std::bind(&MediaPlayerPrivateGStreamerBase::notifyPlayerOfVolumeChange, this)));
}
// Returns the cached network state; subclasses update m_networkState.
MediaPlayer::NetworkState MediaPlayerPrivateGStreamerBase::networkState() const
{
    return m_networkState;
}
// Returns the cached ready state; subclasses update m_readyState.
MediaPlayer::ReadyState MediaPlayerPrivateGStreamerBase::readyState() const
{
    return m_readyState;
}
// Intentionally unimplemented in the base class.
void MediaPlayerPrivateGStreamerBase::sizeChanged()
{
    notImplemented();
}
// Pushes the mute flag to the stream-volume element; no-op until the element
// has been set.
void MediaPlayerPrivateGStreamerBase::setMuted(bool muted)
{
    if (m_volumeElement)
        g_object_set(m_volumeElement.get(), "mute", muted, NULL);
}
bool MediaPlayerPrivateGStreamerBase::muted() const
{
if (!m_volumeElement)
return false;
bool muted;
g_object_get(m_volumeElement.get(), "mute", &muted, NULL);
return muted;
}
void MediaPlayerPrivateGStreamerBase::notifyPlayerOfMute()
{
if (!m_player || !m_volumeElement)
return;
gboolean muted;
g_object_get(m_volumeElement.get(), "mute", &muted, NULL);
m_player->muteChanged(static_cast<bool>(muted));
}
void MediaPlayerPrivateGStreamerBase::muteChanged()
{
m_muteTimerHandler.schedule("[WebKit] MediaPlayerPrivateGStreamerBase::muteChanged", std::function<void()>(std::bind(&MediaPlayerPrivateGStreamerBase::notifyPlayerOfMute, this)));
}
#if USE(TEXTURE_MAPPER_GL) && !USE(COORDINATED_GRAPHICS)
// Uploads the current sample into a pooled BitmapTexture. Tries the
// zero-ish-copy GstVideoGLTextureUploadMeta path first (GStreamer >= 1.1),
// then falls back to mapping the frame and copying the pixels.
PassRefPtr<BitmapTexture> MediaPlayerPrivateGStreamerBase::updateTexture(TextureMapper* textureMapper)
{
    WTF::GMutexLocker<GMutex> lock(m_sampleMutex);
    if (!GST_IS_SAMPLE(m_sample.get()))
        return nullptr;

    GstCaps* caps = gst_sample_get_caps(m_sample.get());
    if (!caps)
        return nullptr;

    GstVideoInfo videoInfo;
    gst_video_info_init(&videoInfo);
    if (!gst_video_info_from_caps(&videoInfo, caps))
        return nullptr;

    IntSize size = IntSize(GST_VIDEO_INFO_WIDTH(&videoInfo), GST_VIDEO_INFO_HEIGHT(&videoInfo));
    RefPtr<BitmapTexture> texture = textureMapper->acquireTextureFromPool(size, GST_VIDEO_INFO_HAS_ALPHA(&videoInfo) ? BitmapTexture::SupportsAlpha : BitmapTexture::NoFlag);
    GstBuffer* buffer = gst_sample_get_buffer(m_sample.get());

#if GST_CHECK_VERSION(1, 1, 0)
    GstVideoGLTextureUploadMeta* meta;
    if ((meta = gst_buffer_get_video_gl_texture_upload_meta(buffer))) {
        // Only single-texture (non-planar) uploads are handled here.
        if (meta->n_textures == 1) {
            const BitmapTextureGL* textureGL = static_cast<const BitmapTextureGL*>(texture.get());
            guint ids[4] = { textureGL->id(), 0, 0, 0 };
            if (gst_video_gl_texture_upload_meta_upload(meta, ids))
                return texture;
        }
    }
#endif

    // Software fallback: the format is expected to be single-plane here.
    ASSERT(GST_VIDEO_INFO_N_PLANES(&videoInfo) == 1);
    GstVideoFrame videoFrame;
    if (!gst_video_frame_map(&videoFrame, &videoInfo, buffer, GST_MAP_READ))
        return nullptr;

    int stride = GST_VIDEO_FRAME_PLANE_STRIDE(&videoFrame, 0);
    const void* srcData = GST_VIDEO_FRAME_PLANE_DATA(&videoFrame, 0);
    texture->updateContents(srcData, WebCore::IntRect(WebCore::IntPoint(0, 0), size), WebCore::IntPoint(0, 0), stride, BitmapTexture::UpdateCannotModifyOriginalImageData);
    gst_video_frame_unmap(&videoFrame);

    return texture;
}
#endif
// Called by the video sink for each new frame. Stores the sample under
// m_sampleMutex and then requests a redraw through the appropriate path.
void MediaPlayerPrivateGStreamerBase::triggerRepaint(GstSample* sample)
{
    {
        WTF::GMutexLocker<GMutex> lock(m_sampleMutex);
        m_sample = sample;
    }

#if USE(GSTREAMER_GL)
    {
        // In the GL path this runs on a GStreamer (non-main) thread. Schedule
        // the draw on the main thread and block here until the scheduled task
        // signals m_drawCondition, keeping the sample alive for the draw.
        ASSERT(!isMainThread());
        WTF::GMutexLocker<GMutex> lock(m_drawMutex);
        m_drawTimerHandler.schedule("[WebKit] video render", [this] {
            WTF::GMutexLocker<GMutex> lock(m_drawMutex);
#if USE(TEXTURE_MAPPER_GL) && !USE(COORDINATED_GRAPHICS)
            if (supportsAcceleratedRendering() && m_player->client().mediaPlayerRenderingCanBeAccelerated(m_player) && client())
                client()->setPlatformLayerNeedsDisplay();
            // Wake the streaming thread waiting below.
            g_cond_signal(&m_drawCondition);
#endif
        });
        g_cond_wait(&m_drawCondition, &m_drawMutex);
    }
#elif USE(TEXTURE_MAPPER_GL) && !USE(COORDINATED_GRAPHICS)
    // Accelerated path: mark the platform layer dirty instead of repainting.
    if (supportsAcceleratedRendering() && m_player->client().mediaPlayerRenderingCanBeAccelerated(m_player) && client()) {
        client()->setPlatformLayerNeedsDisplay();
        return;
    }
#endif

    // Non-accelerated fallback: ask the player for a 2D repaint.
    m_player->repaint();
}
// Caches the size the player has been given; not derived from the video.
void MediaPlayerPrivateGStreamerBase::setSize(const IntSize& size)
{
    m_size = size;
}
// Software (2D) painting path: draws the current sample into the graphics
// context. Skipped entirely when an accelerated compositing client is active.
void MediaPlayerPrivateGStreamerBase::paint(GraphicsContext* context, const FloatRect& rect)
{
#if USE(TEXTURE_MAPPER_GL) && !USE(COORDINATED_GRAPHICS)
    // Frames are rendered through the platform layer instead.
    if (client())
        return;
#endif

    if (context->paintingDisabled() || !m_player->visible())
        return;

    WTF::GMutexLocker<GMutex> lock(m_sampleMutex);
    if (!GST_IS_SAMPLE(m_sample.get()))
        return;

    RefPtr<ImageGStreamer> gstImage = ImageGStreamer::createImage(m_sample.get());
    if (!gstImage)
        return;

    context->drawImage(reinterpret_cast<Image*>(gstImage->image().get()), ColorSpaceSRGB, rect, gstImage->rect(), CompositeCopy);
}
#if USE(TEXTURE_MAPPER_GL) && !USE(COORDINATED_GRAPHICS)
// Accelerated painting path. With the fallback sink the sample is uploaded
// via updateTexture(); with glimagesink the GL texture is drawn directly by
// mapping the buffer with GST_MAP_GL.
void MediaPlayerPrivateGStreamerBase::paintToTextureMapper(TextureMapper* textureMapper, const FloatRect& targetRect, const TransformationMatrix& matrix, float opacity)
{
    if (!m_player->visible())
        return;

    if (m_usingFallbackVideoSink) {
        if (RefPtr<BitmapTexture> texture = updateTexture(textureMapper))
            textureMapper->drawTexture(*texture.get(), targetRect, matrix, opacity);
        return;
    }

#if USE(GSTREAMER_GL)
    // NOTE(review): m_sample is read here without taking m_sampleMutex,
    // unlike paint()/updateTexture() — confirm this is safe on this path.
    if (!GST_IS_SAMPLE(m_sample.get()))
        return;

    GstCaps* caps = gst_sample_get_caps(m_sample.get());
    if (!caps)
        return;

    GstVideoInfo videoInfo;
    gst_video_info_init(&videoInfo);
    if (!gst_video_info_from_caps(&videoInfo, caps))
        return;

    GstBuffer* buffer = gst_sample_get_buffer(m_sample.get());
    GstVideoFrame videoFrame;
    if (!gst_video_frame_map(&videoFrame, &videoInfo, buffer, static_cast<GstMapFlags>(GST_MAP_READ | GST_MAP_GL)))
        return;

    // With GST_MAP_GL, plane data holds the GL texture id rather than pixels.
    unsigned textureID = *reinterpret_cast<unsigned*>(videoFrame.data[0]);
    BitmapTexture::Flags flags = BitmapTexture::NoFlag;
    if (GST_VIDEO_INFO_HAS_ALPHA(&videoInfo))
        flags |= BitmapTexture::SupportsAlpha;

    IntSize size = IntSize(GST_VIDEO_INFO_WIDTH(&videoInfo), GST_VIDEO_INFO_HEIGHT(&videoInfo));
    TextureMapperGL* textureMapperGL = reinterpret_cast<TextureMapperGL*>(textureMapper);
    textureMapperGL->drawTexture(textureID, flags, size, targetRect, matrix, opacity);
    gst_video_frame_unmap(&videoFrame);
#endif
}
#endif
// Fullscreen playback is always supported by this back end.
bool MediaPlayerPrivateGStreamerBase::supportsFullscreen() const
{
    return true;
}
// No platform-specific media object is exposed.
PlatformMedia MediaPlayerPrivateGStreamerBase::platformMedia() const
{
    return NoPlatformMedia;
}
// Classifies the media: Unknown before any data, LiveStream for live
// sources, otherwise a progressive Download.
MediaPlayer::MovieLoadType MediaPlayerPrivateGStreamerBase::movieLoadType() const
{
    if (m_readyState == MediaPlayer::HaveNothing)
        return MediaPlayer::Unknown;

    return isLiveStream() ? MediaPlayer::LiveStream : MediaPlayer::Download;
}
// Creates the element terminating the pipeline's video branch. Prefers
// glimagesink (GStreamer >= 1.5, GSTREAMER_GL builds) and otherwise falls
// back to WebKit's own sink; optionally wraps the sink in fpsdisplaysink so
// frame statistics are available.
GstElement* MediaPlayerPrivateGStreamerBase::createVideoSink()
{
    GstElement* videoSink = nullptr;
#if USE(GSTREAMER_GL)
    if (webkitGstCheckVersion(1, 5, 0)) {
        m_videoSink = gst_element_factory_make("glimagesink", nullptr);
        if (m_videoSink) {
            // glimagesink emits "client-draw" for every frame to render.
            m_repaintHandler = g_signal_connect(m_videoSink.get(), "client-draw", G_CALLBACK(mediaPlayerPrivateDrawCallback), this);
            videoSink = m_videoSink.get();
        }
    }
#endif

    if (!m_videoSink) {
        m_usingFallbackVideoSink = true;
        m_videoSink = webkitVideoSinkNew();
        m_repaintHandler = g_signal_connect(m_videoSink.get(), "repaint-requested", G_CALLBACK(mediaPlayerPrivateRepaintCallback), this);
    }

    m_fpsSink = gst_element_factory_make("fpsdisplaysink", "sink");
    if (m_fpsSink) {
        g_object_set(m_fpsSink.get(), "silent", TRUE , nullptr);

        // Turn off the on-video text overlay unless media logging is active.
#if LOG_DISABLED
        g_object_set(m_fpsSink.get(), "text-overlay", FALSE , nullptr);
#else
        if (!isLogChannelEnabled("Media"))
            g_object_set(m_fpsSink.get(), "text-overlay", FALSE , nullptr);
#endif // LOG_DISABLED

        // Chain the real sink behind fpsdisplaysink when supported; otherwise
        // discard the wrapper.
        if (g_object_class_find_property(G_OBJECT_GET_CLASS(m_fpsSink.get()), "video-sink")) {
            g_object_set(m_fpsSink.get(), "video-sink", m_videoSink.get(), nullptr);
            videoSink = m_fpsSink.get();
        } else
            m_fpsSink = nullptr;
    }

    if (!m_fpsSink)
        videoSink = m_videoSink.get();

    ASSERT(videoSink);
    return videoSink;
}
// Adopts the pipeline's GstStreamVolume element, pushes the MediaPlayer's
// current volume/mute state to it, and only then subscribes to change
// notifications (so the initial writes do not trigger the handlers).
void MediaPlayerPrivateGStreamerBase::setStreamVolumeElement(GstStreamVolume* volume)
{
    ASSERT(!m_volumeElement);
    m_volumeElement = volume;

    // When the platform owns the volume configuration, leave the element's
    // volume untouched.
    if (!m_player->platformVolumeConfigurationRequired()) {
        LOG_MEDIA_MESSAGE("Setting stream volume to %f", m_player->volume());
        g_object_set(m_volumeElement.get(), "volume", m_player->volume(), NULL);
    } else
        LOG_MEDIA_MESSAGE("Not setting stream volume, trusting system one");

    LOG_MEDIA_MESSAGE("Setting stream muted %d", m_player->muted());
    g_object_set(m_volumeElement.get(), "mute", m_player->muted(), NULL);

    m_volumeSignalHandler = g_signal_connect(m_volumeElement.get(), "notify::volume", G_CALLBACK(mediaPlayerPrivateVolumeChangedCallback), this);
    m_muteSignalHandler = g_signal_connect(m_volumeElement.get(), "notify::mute", G_CALLBACK(mediaPlayerPrivateMuteChangedCallback), this);
}
unsigned MediaPlayerPrivateGStreamerBase::decodedFrameCount() const
{
guint64 decodedFrames = 0;
if (m_fpsSink)
g_object_get(m_fpsSink.get(), "frames-rendered", &decodedFrames, NULL);
return static_cast<unsigned>(decodedFrames);
}
unsigned MediaPlayerPrivateGStreamerBase::droppedFrameCount() const
{
guint64 framesDropped = 0;
if (m_fpsSink)
g_object_get(m_fpsSink.get(), "frames-dropped", &framesDropped, NULL);
return static_cast<unsigned>(framesDropped);
}
unsigned MediaPlayerPrivateGStreamerBase::audioDecodedByteCount() const
{
GstQuery* query = gst_query_new_position(GST_FORMAT_BYTES);
gint64 position = 0;
if (audioSink() && gst_element_query(audioSink(), query))
gst_query_parse_position(query, 0, &position);
gst_query_unref(query);
return static_cast<unsigned>(position);
}
// Queries the video sink's position in bytes, used as an approximation of
// the number of video bytes decoded so far.
unsigned MediaPlayerPrivateGStreamerBase::videoDecodedByteCount() const
{
    GstQuery* query = gst_query_new_position(GST_FORMAT_BYTES);
    gint64 position = 0;

    // Guard against a not-yet-created sink, mirroring audioDecodedByteCount();
    // querying a null element would be invalid.
    if (m_videoSink && gst_element_query(m_videoSink.get(), query))
        gst_query_parse_position(query, 0, &position);

    gst_query_unref(query);
    return static_cast<unsigned>(position);
}
}
#endif // USE(GSTREAMER)