// MockAudioSharedUnit.h
#pragma once
#if ENABLE(MEDIA_STREAM)
#include "BaseAudioSharedUnit.h"
#include <CoreAudio/CoreAudioTypes.h>
#include <wtf/RunLoop.h>
#include <wtf/Vector.h>
#include <wtf/WorkQueue.h>
OBJC_CLASS AVAudioPCMBuffer;
typedef struct OpaqueCMClock* CMClockRef;
typedef const struct opaqueCMFormatDescription* CMFormatDescriptionRef;
namespace WebCore {
class WebAudioBufferList;
class WebAudioSourceProviderCocoa;
// Test-only audio capture unit that synthesizes a "bip-bop" tone stream on a
// timer, standing in for real hardware capture behind the BaseAudioSharedUnit
// interface. Accessed as a process-wide singleton.
class MockAudioSharedUnit final : public BaseAudioSharedUnit {
public:
    WEBCORE_EXPORT static MockAudioSharedUnit& singleton();
    MockAudioSharedUnit();

    // Selects which mock device this unit impersonates; consulted when
    // (re)configuring capture.
    void setDeviceID(const String& deviceID) { m_deviceID = deviceID; }

private:
    // BaseAudioSharedUnit overrides.
    bool hasAudioUnit() const final;
    void setCaptureDevice(String&&, uint32_t) final;
    OSStatus reconfigureAudioUnit() final;
    void resetSampleRate() final;
    void cleanupAudioUnit() final;
    OSStatus startInternal() final;
    void stopInternal() final;
    bool isProducingData() const final;
    void delaySamples(Seconds) final;
    // Mock advertises the two standard capture rates only.
    CapabilityValueOrRange sampleRateCapacities() const final { return CapabilityValueOrRange(44100, 48000); }

    // Timer-driven rendering pipeline: tick() fires on m_timer, render()
    // advances the synthetic stream, emitSampleBuffers() delivers frames.
    void tick();
    void render(Seconds);
    void emitSampleBuffers(uint32_t frameCount);
    void reconfigure();

    // Cadence of the render timer.
    static Seconds renderInterval() { return 60_ms; }

    std::unique_ptr<WebAudioBufferList> m_audioBufferList;
    // FIXME: misspelled ("maximium"); renaming requires touching the .cpp uses.
    // Zero-initialized here so it is never read indeterminate before reconfigure().
    uint32_t m_maximiumFrameCount { 0 };
    uint64_t m_samplesEmitted { 0 };
    uint64_t m_samplesRendered { 0 };
    RetainPtr<CMFormatDescriptionRef> m_formatDescription;
    // Value-initialized: AudioStreamBasicDescription is a plain C struct and
    // would otherwise hold garbage until the first reconfigure.
    AudioStreamBasicDescription m_streamFormat { };
    Vector<float> m_bipBopBuffer;
    bool m_hasAudioUnit { false };
    RunLoop::Timer<MockAudioSharedUnit> m_timer;
    // NaN sentinel: no render has happened yet.
    MonotonicTime m_lastRenderTime { MonotonicTime::nan() };
    MonotonicTime m_delayUntil;
    Ref<WorkQueue> m_workQueue;
    unsigned m_channelCount { 2 };
    String m_deviceID;
};
}
#endif // ENABLE(MEDIA_STREAM)