#pragma once
#include "ActiveDOMObject.h"
#include "AsyncAudioDecoder.h"
#include "AudioBus.h"
#include "AudioDestinationNode.h"
#include "EventTarget.h"
#include "JSDOMPromiseDeferred.h"
#include "MediaCanStartListener.h"
#include "MediaProducer.h"
#include "PlatformMediaSession.h"
#include "VisibilityChangeClient.h"
#include <JavaScriptCore/Float32Array.h>
#include <atomic>
#include <wtf/HashSet.h>
#include <wtf/MainThread.h>
#include <wtf/RefPtr.h>
#include <wtf/ThreadSafeRefCounted.h>
#include <wtf/Threading.h>
#include <wtf/Vector.h>
#include <wtf/text/AtomicStringHash.h>
namespace WebCore {
class AnalyserNode;
class AudioBuffer;
class AudioBufferCallback;
class AudioBufferSourceNode;
class AudioListener;
class AudioSummingJunction;
class BiquadFilterNode;
class ChannelMergerNode;
class ChannelSplitterNode;
class ConvolverNode;
class DelayNode;
class Document;
class DynamicsCompressorNode;
class GainNode;
class GenericEventQueue;
class HTMLMediaElement;
class MediaElementAudioSourceNode;
class MediaStream;
class MediaStreamAudioDestinationNode;
class MediaStreamAudioSourceNode;
class OscillatorNode;
class PannerNode;
class PeriodicWave;
class ScriptProcessorNode;
class WaveShaperNode;
// AudioContext is the root object of a Web Audio rendering graph. It vends
// every kind of AudioNode, owns the AudioDestinationNode the graph renders
// into, and coordinates the main thread with the audio rendering thread via
// a graph lock and deferred deref/deletion queues. It also participates in
// the document lifecycle (ActiveDOMObject), media-session arbitration
// (PlatformMediaSessionClient / MediaProducer), and page visibility
// notifications (VisibilityChangeClient).
class AudioContext : public ActiveDOMObject, public ThreadSafeRefCounted<AudioContext>, public EventTargetWithInlineData, public MediaCanStartListener, public MediaProducer, private PlatformMediaSessionClient, private VisibilityChangeClient {
public:
    // Factory for a realtime context; returns RefPtr, so creation may fail.
    static RefPtr<AudioContext> create(Document&);
    virtual ~AudioContext();

    bool isInitialized() const;

    // Offline contexts render into m_renderTarget instead of live hardware.
    bool isOfflineContext() const { return m_isOfflineContext; }

    Document* document() const;
    Document* hostingDocument() const final;

    // Terminal node of the graph. NOTE(review): the three delegating getters
    // below dereference m_destinationNode unconditionally, so they must not
    // be called before the destination node exists.
    AudioDestinationNode* destination() { return m_destinationNode.get(); }
    size_t currentSampleFrame() const { return m_destinationNode->currentSampleFrame(); }
    double currentTime() const { return m_destinationNode->currentTime(); }
    float sampleRate() const { return m_destinationNode->sampleRate(); }

    // Count of active source nodes; backed by std::atomic so it can be
    // updated from either thread.
    unsigned long activeSourceCount() const { return static_cast<unsigned long>(m_activeSourceCount); }
    void incrementActiveSourceCount();
    void decrementActiveSourceCount();

    ExceptionOr<Ref<AudioBuffer>> createBuffer(unsigned numberOfChannels, size_t numberOfFrames, float sampleRate);
    ExceptionOr<Ref<AudioBuffer>> createBuffer(ArrayBuffer&, bool mixToMono);

    // Asynchronous audio file decoding; the two callbacks presumably report
    // success and failure respectively (confirm against the implementation).
    void decodeAudioData(Ref<ArrayBuffer>&&, RefPtr<AudioBufferCallback>&&, RefPtr<AudioBufferCallback>&&);

    AudioListener* listener() { return m_listener.get(); }

    // Promise-returning state transitions exposed to script. The using
    // declarations keep ActiveDOMObject's same-named non-promise overloads
    // visible alongside these.
    using ActiveDOMObject::suspend;
    using ActiveDOMObject::resume;
    void suspend(DOMPromiseDeferred<void>&&);
    void resume(DOMPromiseDeferred<void>&&);
    void close(DOMPromiseDeferred<void>&&);

    enum class State { Suspended, Running, Interrupted, Closed };
    State state() const;

    bool wouldTaintOrigin(const URL&) const;

    // Node factory methods. The ExceptionOr-returning ones can reject
    // invalid arguments.
    Ref<AudioBufferSourceNode> createBufferSource();
#if ENABLE(VIDEO)
    ExceptionOr<Ref<MediaElementAudioSourceNode>> createMediaElementSource(HTMLMediaElement&);
#endif
#if ENABLE(MEDIA_STREAM)
    ExceptionOr<Ref<MediaStreamAudioSourceNode>> createMediaStreamSource(MediaStream&);
    Ref<MediaStreamAudioDestinationNode> createMediaStreamDestination();
#endif
    Ref<GainNode> createGain();
    Ref<BiquadFilterNode> createBiquadFilter();
    Ref<WaveShaperNode> createWaveShaper();
    ExceptionOr<Ref<DelayNode>> createDelay(double maxDelayTime);
    Ref<PannerNode> createPanner();
    Ref<ConvolverNode> createConvolver();
    Ref<DynamicsCompressorNode> createDynamicsCompressor();
    Ref<AnalyserNode> createAnalyser();
    ExceptionOr<Ref<ScriptProcessorNode>> createScriptProcessor(size_t bufferSize, size_t numberOfInputChannels, size_t numberOfOutputChannels);
    ExceptionOr<Ref<ChannelSplitterNode>> createChannelSplitter(size_t numberOfOutputs);
    ExceptionOr<Ref<ChannelMergerNode>> createChannelMerger(size_t numberOfInputs);
    Ref<OscillatorNode> createOscillator();
    ExceptionOr<Ref<PeriodicWave>> createPeriodicWave(Float32Array& real, Float32Array& imaginary);

    // Called when a source node has finished playing so the context can
    // eventually drop the reference it holds on it.
    void notifyNodeFinishedProcessing(AudioNode*);

    // Per-render-quantum hooks for the audio thread.
    void handlePreRenderTasks();
    void handlePostRenderTasks();

    // Deferred deref/deletion machinery: nodes retired on one thread are
    // queued and destroyed later (see m_finishedNodes and the two
    // m_nodesMarkedForDeletion/m_nodesToDelete stages below).
    void derefFinishedSourceNodes();
    void markForDeletion(AudioNode*);
    void deleteMarkedNodes();

    // "Automatic pull" nodes are processed every quantum via
    // processAutomaticPullNodes() independent of destination connectivity.
    void addAutomaticPullNode(AudioNode*);
    void removeAutomaticPullNode(AudioNode*);
    void processAutomaticPullNodes(size_t framesToProcess);

    // Main-thread-only connection statistic (enforced by the ASSERT).
    void incrementConnectionCount()
    {
        ASSERT(isMainThread());
        m_connectionCount++;
    }
    unsigned connectionCount() const { return m_connectionCount; }

    // Identification of the audio rendering thread.
    void setAudioThread(Thread& thread) { m_audioThread = &thread; }
    Thread* audioThread() const { return m_audioThread; }
    bool isAudioThread() const;
    bool isAudioThreadFinished() { return m_isAudioThreadFinished; }

    // Graph lock. mustReleaseLock is an out-parameter telling the caller
    // whether this call actually acquired the lock (see AutoLocker, which
    // only unlocks when it did).
    void lock(bool& mustReleaseLock);
    bool tryLock(bool& mustReleaseLock);
    void unlock();
    bool isGraphOwner() const;

    static unsigned maxNumberOfChannels() { return MaxNumberOfChannels; }

    // RAII holder for the graph lock; releases in the destructor only if
    // lock() reported that this holder took ownership.
    class AutoLocker {
    public:
        explicit AutoLocker(AudioContext& context)
            : m_context(context)
        {
            m_context.lock(m_mustReleaseLock);
        }
        ~AutoLocker()
        {
            if (m_mustReleaseLock)
                m_context.unlock();
        }
    private:
        AudioContext& m_context;
        bool m_mustReleaseLock;
    };

    // Finish-deref bookkeeping (consumed via m_deferredFinishDerefList).
    void addDeferredFinishDeref(AudioNode*);
    void handleDeferredFinishDerefs();

    // Dirty-list bookkeeping for summing junctions and node outputs,
    // drained by the handleDirty*() methods below.
    void markSummingJunctionDirty(AudioSummingJunction*);
    void markAudioNodeOutputDirty(AudioNodeOutput*);
    void removeMarkedSummingJunction(AudioSummingJunction*);

    // EventTarget.
    EventTargetInterface eventTargetInterface() const final { return AudioContextEventTargetInterfaceType; }
    ScriptExecutionContext* scriptExecutionContext() const final;

    // Expose ThreadSafeRefCounted's ref-counting; refEventTarget()/
    // derefEventTarget() below forward to these.
    using ThreadSafeRefCounted::ref;
    using ThreadSafeRefCounted::deref;

    void startRendering();
    void fireCompletionEvent();

    static unsigned s_hardwareContextCount;

    // Restrictions that gate audio start on a user gesture and/or page
    // consent; stored as a bitmask in m_restrictions.
    enum BehaviorRestrictionFlags {
        NoRestrictions = 0,
        RequireUserGestureForAudioStartRestriction = 1 << 0,
        RequirePageConsentForAudioStartRestriction = 1 << 1,
    };
    typedef unsigned BehaviorRestrictions;
    BehaviorRestrictions behaviorRestrictions() const { return m_restrictions; }
    void addBehaviorRestriction(BehaviorRestrictions restriction) { m_restrictions |= restriction; }
    void removeBehaviorRestriction(BehaviorRestrictions restriction) { m_restrictions &= ~restriction; }

    void isPlayingAudioDidChange();

    void nodeWillBeginPlayback();

protected:
    explicit AudioContext(Document&);
    // Constructor used for offline rendering into a buffer of the given
    // channel count, length, and sample rate.
    AudioContext(Document&, unsigned numberOfChannels, size_t numberOfFrames, float sampleRate);
    static bool isSampleRateRangeGood(float sampleRate);

private:
    void constructCommon();
    void lazyInitialize();
    void uninitialize();

    bool willBeginPlayback();
    bool willPausePlayback();

    // Behavior restrictions never apply to offline contexts.
    bool userGestureRequiredForAudioStart() const { return !isOfflineContext() && m_restrictions & RequireUserGestureForAudioStartRestriction; }
    bool pageConsentRequiredForAudioStart() const { return !isOfflineContext() && m_restrictions & RequirePageConsentForAudioStartRestriction; }

    void setState(State);

    void clear();
    void scheduleNodeDeletion();

    // MediaCanStartListener / MediaProducer.
    void mediaCanStart(Document&) override;
    MediaProducer::MediaStateFlags mediaState() const override;
    void pageMutedStateDidChange() override;

    // Keeps a node alive while it is in use even if script has dropped its
    // last reference (tracked in m_referencedNodes).
    void refNode(AudioNode&);
    void derefNode(AudioNode&);

    // ActiveDOMObject.
    void stop() override;
    bool canSuspendForDocumentSuspension() const override;
    const char* activeDOMObjectName() const override;

    void derefUnfinishedSourceNodes();

    // PlatformMediaSessionClient: this context is a WebAudio session that
    // reports HasAudio only while running, never seeks, and ignores remote
    // control commands.
    PlatformMediaSession::MediaType mediaType() const override { return PlatformMediaSession::WebAudio; }
    PlatformMediaSession::MediaType presentationType() const override { return PlatformMediaSession::WebAudio; }
    PlatformMediaSession::CharacteristicsFlags characteristics() const override { return m_state == State::Running ? PlatformMediaSession::HasAudio : PlatformMediaSession::HasNothing; }
    void mayResumePlayback(bool shouldResume) override;
    void suspendPlayback() override;
    bool canReceiveRemoteControlCommands() const override { return false; }
    void didReceiveRemoteControlCommand(PlatformMediaSession::RemoteControlCommandType, const PlatformMediaSession::RemoteCommandArgument*) override { }
    bool supportsSeeking() const override { return false; }
    bool shouldOverrideBackgroundPlaybackRestriction(PlatformMediaSession::InterruptionType) const override { return false; }
    String sourceApplicationIdentifier() const override;
    bool canProduceAudio() const final { return true; }
    bool isSuspended() const final;
    bool processingUserGestureForMedia() const final;

    // VisibilityChangeClient.
    void visibilityStateChanged() final;

    // EventTarget ref-counting forwards to ThreadSafeRefCounted.
    void refEventTarget() override { ref(); }
    void derefEventTarget() override { deref(); }

    void handleDirtyAudioSummingJunctions();
    void handleDirtyAudioNodeOutputs();

    void addReaction(State, DOMPromiseDeferred<void>&&);
    void updateAutomaticPullNodes();

    // Source nodes that have finished playing and await a deref.
    Vector<AudioNode*> m_finishedNodes;

    // Nodes kept alive by refNode()/derefNode().
    Vector<AudioNode*> m_referencedNodes;

    // Two-stage deletion: nodes are first marked, then moved to the delete
    // list and destroyed (see markForDeletion()/deleteMarkedNodes()).
    Vector<AudioNode*> m_nodesMarkedForDeletion;
    Vector<AudioNode*> m_nodesToDelete;
    bool m_isDeletionScheduled { false };

    bool m_isStopScheduled { false };
    bool m_isInitialized { false };
    bool m_isAudioThreadFinished { false };
    bool m_automaticPullNodesNeedUpdating { false };
    bool m_isOfflineContext { false };

    // Dirty sets drained by handleDirtyAudioSummingJunctions()/
    // handleDirtyAudioNodeOutputs().
    HashSet<AudioSummingJunction*> m_dirtySummingJunctions;
    HashSet<AudioNodeOutput*> m_dirtyAudioNodeOutputs;

    HashSet<AudioNode*> m_automaticPullNodes;
    // Presumably a rendering-time snapshot of m_automaticPullNodes,
    // refreshed by updateAutomaticPullNodes() — confirm in the .cpp.
    Vector<AudioNode*> m_renderingAutomaticPullNodes;

    Vector<AudioNode*> m_deferredFinishDerefList;

    // Pending suspend/resume/close promises; presumably indexed by target
    // State via addReaction() — confirm in the .cpp.
    Vector<Vector<DOMPromiseDeferred<void>>> m_stateReactions;

    std::unique_ptr<PlatformMediaSession> m_mediaSession;
    std::unique_ptr<GenericEventQueue> m_eventQueue;

    // Offline rendering target; unused by realtime contexts.
    RefPtr<AudioBuffer> m_renderTarget;
    RefPtr<AudioDestinationNode> m_destinationNode;
    RefPtr<AudioListener> m_listener;

    unsigned m_connectionCount { 0 };

    // Graph lock state. NOTE(review): the Thread* members are volatile for
    // cross-thread visibility, but volatile is not a synchronization
    // primitive — the actual protocol lives in lock()/tryLock()/unlock();
    // verify against the implementation before changing these.
    Lock m_contextGraphMutex;
    Thread* volatile m_audioThread { nullptr };
    Thread* volatile m_graphOwnerThread { nullptr };

    AsyncAudioDecoder m_audioDecoder;

    // Hard upper bound enforced through maxNumberOfChannels().
    enum { MaxNumberOfChannels = 32 };

    std::atomic<int> m_activeSourceCount { 0 };
    BehaviorRestrictions m_restrictions { NoRestrictions };

    State m_state { State::Suspended };
};
// Two AudioContext references denote the same context exactly when they
// alias the same object, so identity (address) comparison suffices.
inline bool operator==(const AudioContext& a, const AudioContext& b)
{
    return &a == &b;
}
// Inequality is defined as the negation of identity equality.
inline bool operator!=(const AudioContext& a, const AudioContext& b)
{
    return !(&a == &b);
}
inline AudioContext::State AudioContext::state() const
{
return m_state;
}
}