// MediaPlayerPrivateGStreamer.h
#ifndef MediaPlayerPrivateGStreamer_h
#define MediaPlayerPrivateGStreamer_h
#if ENABLE(VIDEO) && USE(GSTREAMER)
#include "GRefPtrGStreamer.h"
#include "MediaPlayerPrivateGStreamerBase.h"
#include "Timer.h"
#include <glib.h>
#include <gst/gst.h>
#include <gst/pbutils/install-plugins.h>
#include <wtf/Forward.h>
#include <wtf/gobject/GMainLoopSource.h>
#if ENABLE(VIDEO_TRACK) && USE(GSTREAMER_MPEGTS)
#include <wtf/text/AtomicStringHash.h>
#endif
#if ENABLE(MEDIA_SOURCE)
#include "MediaSourceGStreamer.h"
#endif
typedef struct _GstBuffer GstBuffer;
typedef struct _GstMessage GstMessage;
typedef struct _GstElement GstElement;
typedef struct _GstMpegTsSection GstMpegTsSection;
namespace WebCore {
class AudioTrackPrivateGStreamer;
class InbandMetadataTextTrackPrivateGStreamer;
class InbandTextTrackPrivateGStreamer;
class VideoTrackPrivateGStreamer;
// GStreamer-backed media player implementation. Owns the playbin pipeline
// (m_playBin) and translates GStreamer bus traffic into MediaPlayer state.
class MediaPlayerPrivateGStreamer : public MediaPlayerPrivateGStreamerBase {
public:
    ~MediaPlayerPrivateGStreamer();

    // Registers this engine with WebCore's media-engine factory.
    static void registerMediaEngine(MediaEngineRegistrar);

    // Bus-message dispatcher; returns a gboolean as required by the GStreamer
    // bus-watch callback convention.
    gboolean handleMessage(GstMessage*);
    // Completion callback for an asynchronous plugin-installation request
    // (see gst/pbutils/install-plugins.h).
    void handlePluginInstallerResult(GstInstallPluginsReturn);

    bool hasVideo() const { return m_hasVideo; }
    bool hasAudio() const { return m_hasAudio; }

    void load(const String &url);
#if ENABLE(MEDIA_SOURCE)
    // MSE variant: m_mediaSource keeps the client alive for the pipeline's lifetime.
    void load(const String& url, MediaSourcePrivateClient*);
#endif
    void commitLoad();
    void cancelLoad();

    void prepareToPlay();
    void play();
    void pause();

    bool paused() const;
    bool seeking() const;

    // Times and durations are in seconds (float), matching the MediaPlayer API
    // of this WebKit vintage.
    float duration() const;
    float currentTime() const;
    void seek(float);

    void setRate(float);
    void setPreservesPitch(bool);
    void setPreload(MediaPlayer::Preload);

    // Periodic callback of m_fillTimer; presumably polls buffering/fill level —
    // confirm against the .cpp.
    void fillTimerFired(Timer<MediaPlayerPrivateGStreamer>*);

    std::unique_ptr<PlatformTimeRanges> buffered() const;
    float maxTimeSeekable() const;
    // Compares against m_maxTimeLoadedAtLastDidLoadingProgress (mutable) so it
    // can stay const while recording the last reported position.
    bool didLoadingProgress() const;
    unsigned totalBytes() const;
    float maxTimeLoaded() const;

    // Pipeline/state notifications forwarded to the MediaPlayer client.
    void loadStateChanged();
    void timeChanged();
    void didEnd();
    void durationChanged();
    void loadingFailed(MediaPlayer::NetworkState);

    // Stream-topology change notifications and their main-thread counterparts
    // (dispatched via the GMainLoopSource members below).
    void videoChanged();
    void videoCapsChanged();
    void audioChanged();
    void notifyPlayerOfVideo();
    void notifyPlayerOfVideoCaps();
    void notifyPlayerOfAudio();

#if ENABLE(VIDEO_TRACK)
    void textChanged();
    void notifyPlayerOfText();

    // New sample arriving on m_textAppSink and its main-thread notification.
    void newTextSample();
    void notifyPlayerOfNewTextSample();
#endif

    void sourceChanged();
    GstElement* audioSink() const;
    void setAudioStreamProperties(GObject*);

    void simulateAudioInterruption();

    // Requests a pipeline state change; returns false on failure.
    bool changePipelineState(GstState);

private:
    MediaPlayerPrivateGStreamer(MediaPlayer*);

    // Factory hooks handed to registerMediaEngine().
    static PassOwnPtr<MediaPlayerPrivateInterface> create(MediaPlayer*);
    static void getSupportedTypes(HashSet<String>&);
    static MediaPlayer::SupportsType supportsType(const MediaEngineSupportParameters&);
    static bool isAvailable();

    GstElement* createAudioSink();

    float playbackPosition() const;

    void cacheDuration();
    void updateStates();
    void asyncStateChangeDone();

    void createGSTPlayBin();

    // Alternate-location handling (e.g. redirects advertised on the bus);
    // m_mediaLocations / m_mediaLocationCurrentIndex track the candidates.
    bool loadNextLocation();
    void mediaLocationChanged(GstMessage*);

    void setDownloadBuffering();
    void processBufferingStats(GstMessage*);
#if ENABLE(VIDEO_TRACK) && USE(GSTREAMER_MPEGTS)
    // Feeds MPEG-TS sections into m_metadataTracks.
    void processMpegTsSection(GstMpegTsSection*);
#endif
#if ENABLE(VIDEO_TRACK)
    // Builds m_chaptersTrack from a GStreamer table-of-contents message.
    void processTableOfContents(GstMessage*);
    void processTableOfContentsEntry(GstTocEntry*, GstTocEntry* parent);
#endif
    bool doSeek(gint64 position, float rate, GstSeekFlags seekType);
    void updatePlaybackRate();

    virtual String engineDescription() const { return "GStreamer"; }
    virtual bool isLiveStream() const { return m_isStreaming; }
    virtual bool didPassCORSAccessCheck() const;

private:
    GRefPtr<GstElement> m_playBin;   // The playbin pipeline element.
    GRefPtr<GstElement> m_source;    // Current source element of the pipeline.
#if ENABLE(VIDEO_TRACK)
    GRefPtr<GstElement> m_textAppSink;    // appsink receiving text samples.
    GRefPtr<GstPad> m_textAppSinkPad;
#endif
    float m_seekTime;                // Target of the in-flight seek (seconds).
    bool m_changingRate;
    float m_endTime;
    bool m_isEndReached;
    mutable bool m_isStreaming;      // mutable: updated from const query paths.
    GstStructure* m_mediaLocations;  // Raw pointer — ownership managed in the .cpp.
    int m_mediaLocationCurrentIndex;
    bool m_resetPipeline;
    bool m_paused;
    bool m_playbackRatePause;
    bool m_seeking;
    bool m_seekIsPending;            // Seek deferred (e.g. until async state change completes).
    float m_timeOfOverlappingSeek;   // Seek issued while another was still pending.
    bool m_canFallBackToLastFinishedSeekPosition;
    bool m_buffering;
    float m_playbackRate;
    float m_lastPlaybackRate;
    bool m_errorOccured;
    mutable gfloat m_mediaDuration;  // Cached duration; mutable so duration() const can fill it.
    bool m_downloadFinished;
    Timer<MediaPlayerPrivateGStreamer> m_fillTimer;  // Drives fillTimerFired().
    float m_maxTimeLoaded;
    int m_bufferingPercentage;
    MediaPlayer::Preload m_preload;
    bool m_delayingLoad;             // Load deferred until preload policy allows it.
    bool m_mediaDurationKnown;
    mutable float m_maxTimeLoadedAtLastDidLoadingProgress;  // Snapshot for didLoadingProgress().
    bool m_volumeAndMuteInitialized;
    bool m_hasVideo;
    bool m_hasAudio;
    // Main-loop sources used to bounce GStreamer-thread notifications onto the
    // main thread (see the notifyPlayerOf* methods above).
    GMainLoopSource m_audioTimerHandler;
    GMainLoopSource m_textTimerHandler;
    GMainLoopSource m_videoTimerHandler;
    GMainLoopSource m_videoCapsTimerHandler;
    GMainLoopSource m_readyTimerHandler;
    mutable long m_totalBytes;       // Cached by totalBytes() const.
    URL m_url;
    bool m_preservesPitch;
    GstState m_requestedState;       // State requested via changePipelineState().
    GRefPtr<GstElement> m_autoAudioSink;
    bool m_missingPlugins;
#if ENABLE(VIDEO_TRACK)
    Vector<RefPtr<AudioTrackPrivateGStreamer>> m_audioTracks;
    Vector<RefPtr<InbandTextTrackPrivateGStreamer>> m_textTracks;
    Vector<RefPtr<VideoTrackPrivateGStreamer>> m_videoTracks;
    RefPtr<InbandMetadataTextTrackPrivateGStreamer> m_chaptersTrack;
#endif
#if ENABLE(VIDEO_TRACK) && USE(GSTREAMER_MPEGTS)
    // Metadata tracks keyed by stream identifier; filled from MPEG-TS sections.
    HashMap<AtomicString, RefPtr<InbandMetadataTextTrackPrivateGStreamer>> m_metadataTracks;
#endif
#if ENABLE(MEDIA_SOURCE)
    RefPtr<MediaSourcePrivateClient> m_mediaSource;
#endif
};
}
#endif // ENABLE(VIDEO) && USE(GSTREAMER)
#endif