| File: | Volumes/Data/worker/macOS-Safer-CPP-Checks-EWS/build/Source/WebCore/platform/graphics/cocoa/MediaPlayerPrivateWebM.mm |
| Warning: | line 762, column 45 Call argument is uncounted and unsafe |
| 1 | /* |
| 2 | * Copyright (C) 2022-2025 Apple Inc. All rights reserved. |
| 3 | * |
| 4 | * Redistribution and use in source and binary forms, with or without |
| 5 | * modification, are permitted provided that the following conditions |
| 6 | * are met: |
| 7 | * 1. Redistributions of source code must retain the above copyright |
| 8 | * notice, this list of conditions and the following disclaimer. |
| 9 | * 2. Redistributions in binary form must reproduce the above copyright |
| 10 | * notice, this list of conditions and the following disclaimer in the |
| 11 | * documentation and/or other materials provided with the distribution. |
| 12 | * |
| 13 | * THIS SOFTWARE IS PROVIDED BY APPLE INC. AND ITS CONTRIBUTORS ``AS IS'' |
| 14 | * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, |
| 15 | * THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR |
| 16 | * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR ITS CONTRIBUTORS |
| 17 | * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR |
| 18 | * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF |
| 19 | * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS |
| 20 | * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN |
| 21 | * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) |
| 22 | * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF |
| 23 | * THE POSSIBILITY OF SUCH DAMAGE. |
| 24 | */ |
| 25 | |
| 26 | #import "config.h" |
| 27 | #import "MediaPlayerPrivateWebM.h" |
| 28 | |
| 29 | #if ENABLE(COCOA_WEBM_PLAYER) |
| 30 | |
| 31 | #import "AudioMediaStreamTrackRenderer.h" |
| 32 | #import "AudioTrackPrivateWebM.h" |
| 33 | #import "AudioVideoRendererAVFObjC.h" |
| 34 | #import "FloatSize.h" |
| 35 | #import "GraphicsContext.h" |
| 36 | #import "GraphicsContextStateSaver.h" |
| 37 | #import "Logging.h" |
| 38 | #import "MediaPlaybackTarget.h" |
| 39 | #import "MediaPlayer.h" |
| 40 | #import "MediaSampleAVFObjC.h" |
| 41 | #import "MediaStrategy.h" |
| 42 | #import "NativeImage.h" |
| 43 | #import "NotImplemented.h" |
| 44 | #import "PixelBufferConformerCV.h" |
| 45 | #import "PlatformDynamicRangeLimitCocoa.h" |
| 46 | #import "PlatformMediaResourceLoader.h" |
| 47 | #import "PlatformStrategies.h" |
| 48 | #import "ResourceError.h" |
| 49 | #import "ResourceRequest.h" |
| 50 | #import "ResourceResponse.h" |
| 51 | #import "SampleMap.h" |
| 52 | #import "SecurityOrigin.h" |
| 53 | #import "ShareableBitmap.h" |
| 54 | #import "TrackBuffer.h" |
| 55 | #import "VP9UtilitiesCocoa.h" |
| 56 | #import "VideoFrameCV.h" |
| 57 | #import "VideoTrackPrivateWebM.h" |
| 58 | #import "WebMResourceClient.h" |
| 59 | #import <AVFoundation/AVFoundation.h> |
| 60 | #import <pal/spi/cocoa/QuartzCoreSPI.h> |
| 61 | #import <wtf/MainThread.h> |
| 62 | #import <wtf/NativePromise.h> |
| 63 | #import <wtf/SoftLinking.h> |
| 64 | #import <wtf/TZoneMallocInlines.h> |
| 65 | #import <wtf/WorkQueue.h> |
| 66 | |
| 67 | #pragma mark - Soft Linking |
| 68 | #import "CoreVideoSoftLink.h" |
| 69 | #import "VideoToolboxSoftLink.h" |
| 70 | #import <pal/cf/CoreMediaSoftLink.h> |
| 71 | #import <pal/cocoa/AVFoundationSoftLink.h> |
| 72 | |
| 73 | #pragma mark - |
| 74 | |
| 75 | namespace WebCore { |
| 76 | |
| 77 | using TrackType = TrackInfo::TrackType; |
| 78 | |
| 79 | WTF_MAKE_TZONE_ALLOCATED_IMPL(MediaPlayerPrivateWebM); |
| 80 | |
| 81 | static const MediaTime discontinuityTolerance = MediaTime(1, 1); |
| 82 | |
| 83 | Ref<AudioVideoRenderer> MediaPlayerPrivateWebM::createRenderer(LoggerHelper& loggerHelper, HTMLMediaElementIdentifier mediaElementIdentifier, MediaPlayerIdentifier playerIdentifier) |
| 84 | { |
| 85 | if (hasPlatformStrategies()) { |
| 86 | if (RefPtr renderer = platformStrategies()->mediaStrategy()->createAudioVideoRenderer(&loggerHelper, mediaElementIdentifier, playerIdentifier)) |
| 87 | return renderer.releaseNonNull(); |
| 88 | } |
| 89 | return AudioVideoRendererAVFObjC::create(Ref { loggerHelper.logger() }, loggerHelper.logIdentifier()); |
| 90 | } |
| 91 | |
| 92 | Ref<MediaPlayerPrivateWebM> MediaPlayerPrivateWebM::create(MediaPlayer& player) |
| 93 | { |
| 94 | return adoptRef(*new MediaPlayerPrivateWebM(player)); |
| 95 | } |
| 96 | |
| 97 | MediaPlayerPrivateWebM::MediaPlayerPrivateWebM(MediaPlayer& player) |
| 98 | : m_player(player) |
| 99 | , m_parser(SourceBufferParserWebM::create().releaseNonNull()) |
| 100 | , m_appendQueue(WorkQueue::create("MediaPlayerPrivateWebM data parser queue"_s)) |
| 101 | , m_logger(player.mediaPlayerLogger()) |
| 102 | , m_logIdentifier(player.mediaPlayerLogIdentifier()) |
| 103 | , m_seekTimer(*this, &MediaPlayerPrivateWebM::seekInternal) |
| 104 | , m_rendererSeekRequest(NativePromiseRequest::create()) |
| 105 | , m_playerIdentifier(MediaPlayerIdentifier::generate()) |
| 106 | , m_renderer(createRenderer(*this, player.clientIdentifier(), m_playerIdentifier)) |
| 107 | { |
| 108 | ALWAYS_LOG(LOGIDENTIFIER); |
| 109 | m_parser->setLogger(m_logger, m_logIdentifier); |
| 110 | m_parser->setDidParseInitializationDataCallback([weakThis = ThreadSafeWeakPtr { *this }, this] (InitializationSegment&& segment) { |
| 111 | if (RefPtr protectedThis = weakThis.get()) |
| 112 | didParseInitializationData(WTF::move(segment)); |
| 113 | }); |
| 114 | |
| 115 | m_parser->setDidProvideMediaDataCallback([weakThis = ThreadSafeWeakPtr { *this }, this] (Ref<MediaSampleAVFObjC>&& sample, TrackID trackId, const String& mediaType) { |
| 116 | if (RefPtr protectedThis = weakThis.get()) |
| 117 | didProvideMediaDataForTrackId(WTF::move(sample), trackId, mediaType); |
| 118 | }); |
| 119 | |
| 120 | #if HAVE(SPATIAL_TRACKING_LABEL) |
| 121 | m_defaultSpatialTrackingLabel = player.defaultSpatialTrackingLabel(); |
| 122 | m_spatialTrackingLabel = player.spatialTrackingLabel(); |
| 123 | #endif |
| 124 | } |
| 125 | |
| 126 | MediaPlayerPrivateWebM::~MediaPlayerPrivateWebM() |
| 127 | { |
| 128 | ALWAYS_LOG(LOGIDENTIFIER); |
| 129 | |
| 130 | cancelPendingSeek(); |
| 131 | |
| 132 | clearTracks(); |
| 133 | |
| 134 | cancelLoad(); |
| 135 | } |
| 136 | |
| 137 | static HashSet<String>& mimeTypeCache() |
| 138 | { |
| 139 | static NeverDestroyed cache = HashSet<String>(); |
| 140 | if (cache->isEmpty()) |
| 141 | cache->addAll(SourceBufferParserWebM::supportedMIMETypes()); |
| 142 | return cache; |
| 143 | } |
| 144 | |
| 145 | void MediaPlayerPrivateWebM::getSupportedTypes(HashSet<String>& types) |
| 146 | { |
| 147 | types = mimeTypeCache(); |
| 148 | } |
| 149 | |
| 150 | MediaPlayer::SupportsType MediaPlayerPrivateWebM::supportsType(const MediaEngineSupportParameters& parameters) |
| 151 | { |
| 152 | if (parameters.isMediaSource || parameters.isMediaStream || parameters.requiresRemotePlayback) |
| 153 | return MediaPlayer::SupportsType::IsNotSupported; |
| 154 | |
| 155 | return SourceBufferParserWebM::isContentTypeSupported(parameters.type, parameters.supportsLimitedMatroska); |
| 156 | } |
| 157 | |
| 158 | void MediaPlayerPrivateWebM::setPreload(MediaPlayer::Preload preload) |
| 159 | { |
| 160 | ALWAYS_LOG(LOGIDENTIFIER, " - ", static_cast<int>(preload)); |
| 161 | if (preload == std::exchange(m_preload, preload)) |
| 162 | return; |
| 163 | doPreload(); |
| 164 | } |
| 165 | |
| 166 | void MediaPlayerPrivateWebM::doPreload() |
| 167 | { |
| 168 | if (m_assetURL.isEmpty() || m_networkState >= MediaPlayerNetworkState::FormatError) { |
| 169 | INFO_LOG(LOGIDENTIFIER, " - hasURL = ", static_cast<int>(m_assetURL.isEmpty()), " networkState = ", static_cast<int>(m_networkState)); |
| 170 | return; |
| 171 | } |
| 172 | |
| 173 | RefPtr player = m_player.get(); |
| 174 | if (!player) |
| 175 | return; |
| 176 | |
| 177 | auto mimeType = player->contentMIMEType(); |
| 178 | if (mimeType.isEmpty() || !mimeTypeCache().contains(mimeType)) { |
| 179 | ERROR_LOG(LOGIDENTIFIER, "mime type = ", mimeType, " not supported"); |
| 180 | setNetworkState(MediaPlayer::NetworkState::FormatError); |
| 181 | return; |
| 182 | } |
| 183 | |
| 184 | if (m_preload >= MediaPlayer::Preload::MetaData && needsResourceClient()) { |
| 185 | if (!createResourceClientIfNeeded()) { |
| 186 | ERROR_LOG(LOGIDENTIFIER, "could not create resource client"); |
| 187 | setNetworkState(MediaPlayer::NetworkState::NetworkError); |
| 188 | setReadyState(MediaPlayer::ReadyState::HaveNothing); |
| 189 | } else |
| 190 | setNetworkState(MediaPlayer::NetworkState::Loading); |
| 191 | } |
| 192 | |
| 193 | if (m_preload > MediaPlayer::Preload::MetaData) { |
| 194 | for (auto it = m_readyForMoreSamplesMap.begin(); it != m_readyForMoreSamplesMap.end(); ++it) |
| 195 | notifyClientWhenReadyForMoreSamples(it->first); |
| 196 | } |
| 197 | } |
| 198 | |
| 199 | void MediaPlayerPrivateWebM::load(const URL& url, const LoadOptions& options) |
| 200 | { |
| 201 | ALWAYS_LOG(LOGIDENTIFIER); |
| 202 | |
| 203 | setReadyState(MediaPlayer::ReadyState::HaveNothing); |
| 204 | |
| 205 | m_assetURL = url; |
| 206 | if (options.supportsLimitedMatroska) |
| 207 | m_parser->allowLimitedMatroska(); |
| 208 | |
| 209 | m_renderer->setPreferences(options.videoRendererPreferences | VideoRendererPreference::PrefersDecompressionSession); |
| 210 | |
| 211 | m_renderer->notifyWhenErrorOccurs([weakThis = ThreadSafeWeakPtr { *this }](PlatformMediaError error) { |
| 212 | ensureOnMainThread([weakThis, error] { |
| 213 | if (RefPtr protectedThis = weakThis.get()) { |
| 214 | protectedThis->m_errored = true; |
| 215 | if (RefPtr player = protectedThis->m_player.get(); player && error == PlatformMediaError::IPCError) { |
| 216 | player->reloadAndResumePlaybackIfNeeded(); |
| 217 | return; |
| 218 | } |
| 219 | protectedThis->setNetworkState(MediaPlayer::NetworkState::DecodeError); |
| 220 | protectedThis->setReadyState(MediaPlayer::ReadyState::HaveNothing); |
| 221 | } |
| 222 | }); |
| 223 | }); |
| 224 | |
| 225 | m_renderer->notifyFirstFrameAvailable([weakThis = ThreadSafeWeakPtr { *this }] { |
| 226 | ensureOnMainThread([weakThis] { |
| 227 | if (RefPtr protectedThis = weakThis.get()) |
| 228 | protectedThis->setHasAvailableVideoFrame(true); |
| 229 | }); |
| 230 | }); |
| 231 | |
| 232 | m_renderer->notifyWhenRequiresFlushToResume([weakThis = ThreadSafeWeakPtr { *this }] { |
| 233 | ensureOnMainThread([weakThis] { |
| 234 | if (RefPtr protectedThis = weakThis.get()) |
| 235 | protectedThis->setLayerRequiresFlush(); |
| 236 | }); |
| 237 | }); |
| 238 | |
| 239 | m_renderer->notifyRenderingModeChanged([weakThis = ThreadSafeWeakPtr { *this }] { |
| 240 | ensureOnMainThread([weakThis] { |
| 241 | if (RefPtr protectedThis = weakThis.get()) { |
| 242 | if (RefPtr player = protectedThis->m_player.get()) |
| 243 | player->renderingModeChanged(); |
| 244 | } |
| 245 | }); |
| 246 | }); |
| 247 | |
| 248 | m_renderer->notifySizeChanged([weakThis = ThreadSafeWeakPtr { *this }](const MediaTime&, FloatSize size) { |
| 249 | ensureOnMainThread([weakThis, size] { |
| 250 | if (RefPtr protectedThis = weakThis.get()) |
| 251 | protectedThis->setNaturalSize(size); |
| 252 | }); |
| 253 | }); |
| 254 | |
| 255 | m_renderer->notifyEffectiveRateChanged([weakThis = ThreadSafeWeakPtr { *this }](double) { |
| 256 | ensureOnMainThread([weakThis] { |
| 257 | if (RefPtr protectedThis = weakThis.get()) |
| 258 | protectedThis->effectiveRateChanged(); |
| 259 | }); |
| 260 | }); |
| 261 | |
| 262 | m_renderer->setPreferences(VideoRendererPreference::PrefersDecompressionSession); |
| 263 | |
| 264 | m_renderer->notifyVideoLayerSizeChanged([weakThis = ThreadSafeWeakPtr { *this }](const MediaTime&, FloatSize size) { |
| 265 | ensureOnMainThread([weakThis, size] { |
| 266 | if (RefPtr protectedThis = weakThis.get()) { |
| 267 | if (RefPtr player = protectedThis->m_player.get()) |
| 268 | player->videoLayerSizeDidChange(size); |
| 269 | } |
| 270 | }); |
| 271 | }); |
| 272 | |
| 273 | if (RefPtr player = m_player.get()) { |
| 274 | m_renderer->setVolume(player->volume()); |
| 275 | m_renderer->setMuted(player->muted()); |
| 276 | m_renderer->setPreservesPitchAndCorrectionAlgorithm(player->preservesPitch(), player->pitchCorrectionAlgorithm()); |
| 277 | #if HAVE(AUDIO_OUTPUT_DEVICE_UNIQUE_ID) |
| 278 | m_renderer->setOutputDeviceId(player->audioOutputDeviceIdOverride()); |
| 279 | #endif |
| 280 | #if ENABLE(LINEAR_MEDIA_PLAYER) |
| 281 | m_renderer->setVideoTarget(player->videoTarget()); |
| 282 | #endif |
| 283 | m_renderer->setPresentationSize(player->presentationSize()); |
| 284 | m_renderer->renderingCanBeAcceleratedChanged(player->renderingCanBeAccelerated()); |
| 285 | } |
| 286 | |
| 287 | doPreload(); |
| 288 | } |
| 289 | |
| 290 | bool MediaPlayerPrivateWebM::needsResourceClient() const |
| 291 | { |
| 292 | return !m_resourceClient && m_needsResourceClient; |
| 293 | } |
| 294 | |
| 295 | bool MediaPlayerPrivateWebM::createResourceClientIfNeeded() |
| 296 | { |
| 297 | ALWAYS_LOG(LOGIDENTIFIER); |
| 298 | |
| 299 | ASSERT(needsResourceClient()); |
| 300 | |
| 301 | RefPtr player = m_player.get(); |
| 302 | if (!player) |
| 303 | return false; |
| 304 | |
| 305 | ResourceRequest request(URL { m_assetURL }); |
| 306 | request.setAllowCookies(true); |
| 307 | if (m_contentReceived) { |
| 308 | if (!m_contentLength) |
| 309 | return false; |
| 310 | if (m_contentLength <= m_contentReceived) { |
| 311 | m_needsResourceClient = false; |
| 312 | return true; |
| 313 | } |
| 314 | request.addHTTPHeaderField(HTTPHeaderName::Range, makeString("bytes="_s, m_contentReceived, '-', m_contentLength)); |
| 315 | } |
| 316 | |
| 317 | m_resourceClient = WebMResourceClient::create(*this, player->mediaResourceLoader(), WTF::move(request)); |
| 318 | |
| 319 | return !!m_resourceClient; |
| 320 | } |
| 321 | |
| 322 | #if ENABLE(MEDIA_SOURCE) |
| 323 | void MediaPlayerPrivateWebM::load(const URL&, const LoadOptions&, MediaSourcePrivateClient&) |
| 324 | { |
| 325 | ERROR_LOG(LOGIDENTIFIER, "tried to load as mediasource"); |
| 326 | |
| 327 | setNetworkState(MediaPlayer::NetworkState::FormatError); |
| 328 | } |
| 329 | #endif |
| 330 | |
| 331 | #if ENABLE(MEDIA_STREAM) |
| 332 | void MediaPlayerPrivateWebM::load(MediaStreamPrivate&) |
| 333 | { |
| 334 | ERROR_LOG(LOGIDENTIFIER, "tried to load as mediastream"); |
| 335 | |
| 336 | setNetworkState(MediaPlayer::NetworkState::FormatError); |
| 337 | } |
| 338 | #endif |
| 339 | |
| 340 | void MediaPlayerPrivateWebM::dataLengthReceived(size_t length) |
| 341 | { |
| 342 | callOnMainThread([protectedThis = Ref { *this }, length] { |
| 343 | protectedThis->m_contentLength = length; |
| 344 | }); |
| 345 | } |
| 346 | |
| 347 | void MediaPlayerPrivateWebM::dataReceived(const SharedBuffer& buffer) |
| 348 | { |
| 349 | ALWAYS_LOG(LOGIDENTIFIER, "data length = ", buffer.size()); |
| 350 | |
| 351 | callOnMainThread([protectedThis = Ref { *this }, this, size = buffer.size()] { |
| 352 | setNetworkState(MediaPlayer::NetworkState::Loading); |
| 353 | m_pendingAppends++; |
| 354 | m_contentReceived += size; |
| 355 | }); |
| 356 | |
| 357 | invokeAsync(m_appendQueue, [buffer = Ref { buffer }, parser = m_parser]() mutable { |
| 358 | return MediaPromise::createAndSettle(parser->appendData(WTF::move(buffer))); |
| 359 | })->whenSettled(RunLoop::mainSingleton(), [weakThis = ThreadSafeWeakPtr { *this }](auto&& result) { |
| 360 | if (RefPtr protectedThis = weakThis.get()) |
| 361 | protectedThis->appendCompleted(!!result); |
| 362 | }); |
| 363 | } |
| 364 | |
| 365 | void MediaPlayerPrivateWebM::loadFailed(const ResourceError& error) |
| 366 | { |
| 367 | ERROR_LOG(LOGIDENTIFIER, "resource failed to load with code ", error.errorCode()); |
| 368 | callOnMainThread([protectedThis = Ref { *this }] { |
| 369 | protectedThis->setNetworkState(MediaPlayer::NetworkState::NetworkError); |
| 370 | }); |
| 371 | } |
| 372 | |
| 373 | void MediaPlayerPrivateWebM::loadFinished() |
| 374 | { |
| 375 | ALWAYS_LOG(LOGIDENTIFIER); |
| 376 | callOnMainThread([protectedThis = Ref { *this }] { |
| 377 | protectedThis->m_loadFinished = true; |
| 378 | protectedThis->maybeFinishLoading(); |
| 379 | }); |
| 380 | } |
| 381 | |
| 382 | void MediaPlayerPrivateWebM::cancelLoad() |
| 383 | { |
| 384 | if (RefPtr resourceClient = m_resourceClient) { |
| 385 | resourceClient->stop(); |
| 386 | m_resourceClient = nullptr; |
| 387 | } |
| 388 | } |
| 389 | |
| 390 | PlatformLayer* MediaPlayerPrivateWebM::platformLayer() const |
| 391 | { |
| 392 | return m_renderer->platformVideoLayer(); |
| 393 | } |
| 394 | |
| 395 | void MediaPlayerPrivateWebM::prepareToPlay() |
| 396 | { |
| 397 | ALWAYS_LOG(LOGIDENTIFIER); |
| 398 | setPreload(MediaPlayer::Preload::Auto); |
| 399 | } |
| 400 | |
| 401 | void MediaPlayerPrivateWebM::play() |
| 402 | { |
| 403 | ALWAYS_LOG(LOGIDENTIFIER); |
| 404 | playInternal(); |
| 405 | } |
| 406 | |
| 407 | void MediaPlayerPrivateWebM::pause() |
| 408 | { |
| 409 | ALWAYS_LOG(LOGIDENTIFIER); |
| 410 | m_renderer->pause(); |
| 411 | } |
| 412 | |
| 413 | bool MediaPlayerPrivateWebM::paused() const |
| 414 | { |
| 415 | return m_renderer->paused(); |
| 416 | } |
| 417 | |
| 418 | bool MediaPlayerPrivateWebM::playAtHostTime(const MonotonicTime& hostTime) |
| 419 | { |
| 420 | ALWAYS_LOG(LOGIDENTIFIER); |
| 421 | playInternal(hostTime); |
| 422 | return true; |
| 423 | } |
| 424 | |
| 425 | bool MediaPlayerPrivateWebM::pauseAtHostTime(const MonotonicTime& hostTime) |
| 426 | { |
| 427 | ALWAYS_LOG(LOGIDENTIFIER); |
| 428 | m_renderer->pause(hostTime); |
| 429 | return true; |
| 430 | } |
| 431 | |
| 432 | void MediaPlayerPrivateWebM::playInternal(std::optional<MonotonicTime> hostTime) |
| 433 | { |
| 434 | ALWAYS_LOG(LOGIDENTIFIER); |
| 435 | flushVideoIfNeeded(); |
| 436 | |
| 437 | m_renderer->play(hostTime); |
| 438 | |
| 439 | if (!shouldBePlaying()) |
| 440 | return; |
| 441 | |
| 442 | if (currentTime() >= duration()) |
| 443 | seekToTarget(SeekTarget::zero()); |
| 444 | } |
| 445 | |
| 446 | bool MediaPlayerPrivateWebM::performTaskAtTime(Function<void(const MediaTime&)>&& task, const MediaTime& time) |
| 447 | { |
| 448 | ALWAYS_LOG(LOGIDENTIFIER, time); |
| 449 | |
| 450 | m_renderer->performTaskAtTime(time, [task = WTF::move(task)](const MediaTime& time) mutable { |
| 451 | ensureOnMainThread([time, task = WTF::move(task)] { |
| 452 | task(time); |
| 453 | }); |
| 454 | }); |
| 455 | return true; |
| 456 | } |
| 457 | |
| 458 | void MediaPlayerPrivateWebM::audioOutputDeviceChanged() |
| 459 | { |
| 460 | #if HAVE(AUDIO_OUTPUT_DEVICE_UNIQUE_ID) |
| 461 | if (RefPtr player = m_player.get()) |
| 462 | m_renderer->setOutputDeviceId(player->audioOutputDeviceId()); |
| 463 | #endif |
| 464 | } |
| 465 | |
| 466 | bool MediaPlayerPrivateWebM::timeIsProgressing() const |
| 467 | { |
| 468 | return m_renderer->timeIsProgressing(); |
| 469 | } |
| 470 | |
| 471 | void MediaPlayerPrivateWebM::setPageIsVisible(bool visible) |
| 472 | { |
| 473 | if (m_visible == visible) |
| 474 | return; |
| 475 | |
| 476 | ALWAYS_LOG(LOGIDENTIFIER, visible); |
| 477 | m_visible = visible; |
| 478 | m_renderer->setIsVisible(visible); |
| 479 | |
| 480 | #if HAVE(SPATIAL_TRACKING_LABEL) |
| 481 | updateSpatialTrackingLabel(); |
| 482 | #endif |
| 483 | } |
| 484 | |
| 485 | MediaTime MediaPlayerPrivateWebM::currentTime() const |
| 486 | { |
| 487 | return m_renderer->currentTime(); |
| 488 | } |
| 489 | |
| 490 | void MediaPlayerPrivateWebM::seekToTarget(const SeekTarget& target) |
| 491 | { |
| 492 | ALWAYS_LOG(LOGIDENTIFIER, "time = ", target.time, ", negativeThreshold = ", target.negativeThreshold, ", positiveThreshold = ", target.positiveThreshold); |
| 493 | |
| 494 | m_pendingSeek = target; |
| 495 | |
| 496 | if (m_seekTimer.isActive()) |
| 497 | m_seekTimer.stop(); |
| 498 | m_seekTimer.startOneShot(0_s); |
| 499 | } |
| 500 | |
| 501 | void MediaPlayerPrivateWebM::seekInternal() |
| 502 | { |
| 503 | if (!m_pendingSeek) |
| 504 | return; |
| 505 | |
| 506 | ALWAYS_LOG(LOGIDENTIFIER, m_pendingSeek->time); |
| 507 | |
| 508 | auto pendingSeek = std::exchange(m_pendingSeek, { }).value(); |
| 509 | m_lastSeekTime = pendingSeek.time; |
| 510 | |
| 511 | cancelPendingSeek(); |
| 512 | |
| 513 | m_seeking = true; |
| 514 | |
| 515 | m_renderer->prepareToSeek(); |
| 516 | |
| 517 | waitForTimeBuffered(m_lastSeekTime)->whenSettled(RunLoop::mainSingleton(), [weakThis = ThreadSafeWeakPtr { *this }, seekTime = m_lastSeekTime](auto&& result) { |
| 518 | RefPtr protectedThis = weakThis.get(); |
| 519 | if (!result || !protectedThis) |
| 520 | return; // seek cancelled. |
| 521 | |
| 522 | return protectedThis->startSeek(seekTime); |
| 523 | }); |
| 524 | } |
| 525 | |
| 526 | void MediaPlayerPrivateWebM::cancelPendingSeek() |
| 527 | { |
| 528 | if (m_rendererSeekRequest->hasCallback()) |
| 529 | m_rendererSeekRequest->disconnect(); |
| 530 | if (auto promise = std::exchange(m_waitForTimeBufferedPromise, std::nullopt)) |
| 531 | promise->reject(); |
| 532 | } |
| 533 | |
| 534 | void MediaPlayerPrivateWebM::startSeek(const MediaTime& seekTime) |
| 535 | { |
| 536 | m_renderer->seekTo(seekTime)->whenSettled(RunLoop::mainSingleton(), [weakThis = ThreadSafeWeakPtr { *this }, seekTime](auto&& result) { |
| 537 | if (!result && result.error() != PlatformMediaError::RequiresFlushToResume) |
| 538 | return; // cancelled. |
| 539 | |
| 540 | RefPtr protectedThis = weakThis.get(); |
| 541 | if (!protectedThis) |
| 542 | return; |
| 543 | |
| 544 | protectedThis->m_rendererSeekRequest->complete(); |
| 545 | |
| 546 | if (!result) { |
| 547 | ASSERT(result.error() == PlatformMediaError::RequiresFlushToResume); |
| 548 | protectedThis->flush(); |
| 549 | protectedThis->reenqueueMediaForTime(seekTime); |
| 550 | // Try seeking again. |
| 551 | return protectedThis->startSeek(seekTime); |
| 552 | } |
| 553 | protectedThis->completeSeek(*result); |
| 554 | })->track(m_rendererSeekRequest.get()); |
| 555 | } |
| 556 | |
| 557 | void MediaPlayerPrivateWebM::completeSeek(const MediaTime& seekedTime) |
| 558 | { |
| 559 | ALWAYS_LOG(LOGIDENTIFIER, ""); |
| 560 | |
| 561 | m_seeking = false; |
| 562 | |
| 563 | monitorReadyState(); |
| 564 | |
| 565 | if (RefPtr player = m_player.get()) { |
| 566 | player->seeked(seekedTime); |
| 567 | player->timeChanged(); |
| 568 | } |
| 569 | } |
| 570 | |
| 571 | Ref<GenericPromise> MediaPlayerPrivateWebM::waitForTimeBuffered(const MediaTime& time) |
| 572 | { |
| 573 | ASSERT(!m_waitForTimeBufferedPromise); |
| 574 | |
| 575 | if (m_buffered.containWithEpsilon(time, timeFudgeFactor())) { |
| 576 | ALWAYS_LOG(LOGIDENTIFIER, "buffered contains seektime, resolving"); |
| 577 | return GenericPromise::createAndResolve(); |
| 578 | } |
| 579 | |
| 580 | setReadyState(MediaPlayer::ReadyState::HaveMetadata); |
| 581 | |
| 582 | ALWAYS_LOG(LOGIDENTIFIER, "buffered doesn't contain seektime waiting"); |
| 583 | m_waitForTimeBufferedPromise.emplace(); |
| 584 | return m_waitForTimeBufferedPromise->promise(); |
| 585 | } |
| 586 | |
| 587 | bool MediaPlayerPrivateWebM::seeking() const |
| 588 | { |
| 589 | return m_pendingSeek || m_seeking; |
| 590 | } |
| 591 | |
| 592 | bool MediaPlayerPrivateWebM::shouldBePlaying() const |
| 593 | { |
| 594 | return !m_renderer->paused() && !seeking(); |
| 595 | } |
| 596 | |
| 597 | void MediaPlayerPrivateWebM::setRateDouble(double rate) |
| 598 | { |
| 599 | if (rate == m_rate) |
| 600 | return; |
| 601 | |
| 602 | m_rate = std::max<double>(rate, 0); |
| 603 | |
| 604 | m_renderer->setRate(m_rate); |
| 605 | |
| 606 | if (RefPtr player = m_player.get()) |
| 607 | player->rateChanged(); |
| 608 | } |
| 609 | |
| 610 | double MediaPlayerPrivateWebM::effectiveRate() const |
| 611 | { |
| 612 | return m_renderer->effectiveRate(); |
| 613 | } |
| 614 | |
| 615 | void MediaPlayerPrivateWebM::setVolume(float volume) |
| 616 | { |
| 617 | m_renderer->setVolume(volume); |
| 618 | } |
| 619 | |
| 620 | void MediaPlayerPrivateWebM::setMuted(bool muted) |
| 621 | { |
| 622 | m_renderer->setMuted(muted); |
| 623 | } |
| 624 | |
| 625 | const PlatformTimeRanges& MediaPlayerPrivateWebM::buffered() const |
| 626 | { |
| 627 | return m_buffered; |
| 628 | } |
| 629 | |
| 630 | void MediaPlayerPrivateWebM::setBufferedRanges(PlatformTimeRanges timeRanges) |
| 631 | { |
| 632 | if (m_buffered == timeRanges) |
| 633 | return; |
| 634 | m_buffered = WTF::move(timeRanges); |
| 635 | if (RefPtr player = m_player.get()) { |
| 636 | player->bufferedTimeRangesChanged(); |
| 637 | player->seekableTimeRangesChanged(); |
| 638 | } |
| 639 | |
| 640 | monitorReadyState(); |
| 641 | } |
| 642 | |
| 643 | void MediaPlayerPrivateWebM::updateBufferedFromTrackBuffers(bool ended) |
| 644 | { |
| 645 | MediaTime highestEndTime = MediaTime::negativeInfiniteTime(); |
| 646 | for (auto& pair : m_trackBufferMap) { |
| 647 | auto& trackBuffer = pair.second; |
| 648 | if (!trackBuffer->buffered().length()) |
| 649 | continue; |
| 650 | highestEndTime = std::max(highestEndTime, trackBuffer->maximumBufferedTime()); |
| 651 | } |
| 652 | |
| 653 | // NOTE: Short circuit the following if none of the TrackBuffers have buffered ranges to avoid generating |
| 654 | // a single range of {0, 0}. |
| 655 | if (highestEndTime.isNegativeInfinite()) { |
| 656 | setBufferedRanges(PlatformTimeRanges()); |
| 657 | return; |
| 658 | } |
| 659 | |
| 660 | PlatformTimeRanges intersectionRanges { MediaTime::zeroTime(), highestEndTime }; |
| 661 | |
| 662 | for (auto& pair : m_trackBufferMap) { |
| 663 | auto& trackBuffer = pair.second; |
| 664 | if (!trackBuffer->buffered().length()) |
| 665 | continue; |
| 666 | |
| 667 | PlatformTimeRanges trackRanges = trackBuffer->buffered(); |
| 668 | |
| 669 | if (ended) |
| 670 | trackRanges.add(trackRanges.maximumBufferedTime(), highestEndTime); |
| 671 | |
| 672 | intersectionRanges.intersectWith(trackRanges); |
| 673 | } |
| 674 | |
| 675 | setBufferedRanges(WTF::move(intersectionRanges)); |
| 676 | } |
| 677 | |
| 678 | void MediaPlayerPrivateWebM::updateDurationFromTrackBuffers() |
| 679 | { |
| 680 | MediaTime highestEndTime = MediaTime::zeroTime(); |
| 681 | for (auto& pair : m_trackBufferMap) { |
| 682 | auto& trackBuffer = pair.second; |
| 683 | if (!trackBuffer->highestPresentationTimestamp()) |
| 684 | continue; |
| 685 | highestEndTime = std::max(highestEndTime, trackBuffer->highestPresentationTimestamp()); |
| 686 | } |
| 687 | |
| 688 | setDuration(WTF::move(highestEndTime)); |
| 689 | } |
| 690 | |
| 691 | void MediaPlayerPrivateWebM::setLoadingProgresssed(bool loadingProgressed) |
| 692 | { |
| 693 | INFO_LOG(LOGIDENTIFIER, loadingProgressed); |
| 694 | m_loadingProgressed = loadingProgressed; |
| 695 | } |
| 696 | |
| 697 | bool MediaPlayerPrivateWebM::didLoadingProgress() const |
| 698 | { |
| 699 | return std::exchange(m_loadingProgressed, false); |
| 700 | } |
| 701 | |
| 702 | RefPtr<NativeImage> MediaPlayerPrivateWebM::nativeImageForCurrentTime() |
| 703 | { |
| 704 | updateLastImage(); |
| 705 | return m_lastImage; |
| 706 | } |
| 707 | |
| 708 | bool MediaPlayerPrivateWebM::updateLastVideoFrame() |
| 709 | { |
| 710 | RefPtr videoFrame = m_renderer->currentVideoFrame(); |
| 711 | if (!videoFrame) |
| 712 | return false; |
| 713 | |
| 714 | INFO_LOG(LOGIDENTIFIER, "displayed pixelbuffer copied for time ", videoFrame->presentationTime()); |
| 715 | m_lastVideoFrame = WTF::move(videoFrame); |
| 716 | return true; |
| 717 | } |
| 718 | |
| 719 | bool MediaPlayerPrivateWebM::updateLastImage() |
| 720 | { |
| 721 | if (m_isGatheringVideoFrameMetadata) { |
| 722 | auto metrics = m_renderer->videoPlaybackQualityMetrics(); |
| 723 | auto sampleCount = metrics ? metrics->displayCompositedVideoFrames : 0; |
| 724 | if (sampleCount == m_lastConvertedSampleCount) |
| 725 | return false; |
| 726 | m_lastConvertedSampleCount = sampleCount; |
| 727 | } |
| 728 | m_lastImage = m_renderer->currentNativeImage(); |
| 729 | return !!m_lastImage; |
| 730 | } |
| 731 | |
| 732 | void MediaPlayerPrivateWebM::paint(GraphicsContext& context, const FloatRect& rect) |
| 733 | { |
| 734 | paintCurrentFrameInContext(context, rect); |
| 735 | } |
| 736 | |
| 737 | void MediaPlayerPrivateWebM::paintCurrentFrameInContext(GraphicsContext& context, const FloatRect& outputRect) |
| 738 | { |
| 739 | m_renderer->paintCurrentVideoFrameInContext(context, outputRect); |
| 740 | } |
| 741 | |
| 742 | RefPtr<VideoFrame> MediaPlayerPrivateWebM::videoFrameForCurrentTime() |
| 743 | { |
| 744 | if (!m_isGatheringVideoFrameMetadata) |
| 745 | updateLastVideoFrame(); |
| 746 | return m_lastVideoFrame; |
| 747 | } |
| 748 | |
| 749 | DestinationColorSpace MediaPlayerPrivateWebM::colorSpace() |
| 750 | { |
| 751 | updateLastImage(); |
| 752 | RefPtr lastImage = m_lastImage; |
| 753 | return lastImage ? lastImage->colorSpace() : DestinationColorSpace::SRGB(); |
| 754 | } |
| 755 | |
| 756 | Ref<MediaPlayer::BitmapImagePromise> MediaPlayerPrivateWebM::bitmapImageForCurrentTime() |
| 757 | { |
| 758 | return m_renderer->currentNativeImageAsync()->whenSettled(RunLoop::mainSingleton(), [weakThis = ThreadSafeWeakPtr { *this }](auto&& result) { |
| 759 | RefPtr protectedThis = weakThis.get(); |
| 760 | if (!protectedThis || !result) |
| 761 | return BitmapImagePromise::createAndReject(); |
| 762 | if (RefPtr bitmap = bitmapFromImage(*result)) |
| | Call argument is uncounted and unsafe |
| 763 | return BitmapImagePromise::createAndResolve(bitmap.releaseNonNull()); |
| 764 | return BitmapImagePromise::createAndReject(); |
| 765 | }); |
| 766 | } |
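
The warning flagged above refers to the `bitmapFromImage(*result)` call at line 762: `*result` is handed to the callee as a plain, uncounted reference, so the checker cannot prove the object stays alive for the duration of the call. The sketch below is a minimal, self-contained illustration of the pattern WebKit's Safer CPP "uncounted call argument" checks generally accept; `Image` and `consume` are hypothetical stand-ins, not the WebCore types involved here, and this is not the actual fix for this file.

```cpp
// Illustration only: 'Image' and 'consume' are hypothetical stand-ins.
#include <wtf/Ref.h>
#include <wtf/RefCounted.h>

class Image : public RefCounted<Image> { };

// A callee taking a raw reference; the analyzer cannot tell whether it
// extends the object's lifetime internally.
void consume(Image&) { }

void caller(Image& incoming)
{
    // Typically flagged: 'incoming' is an uncounted call argument.
    //   consume(incoming);

    // Typically accepted: hold a counted reference for the duration of the call.
    Ref protectedImage { incoming };
    consume(protectedImage.get());
}
```

In WebCore code the same effect is often obtained inline, for example by passing `Ref { *result }` or by keeping a local `Ref`/`RefPtr` in scope before the call, so the argument is visibly ref-counted at the call site; whether that is the appropriate change here is for the actual patch to decide.
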
| 767 | |
| 768 | void MediaPlayerPrivateWebM::setNaturalSize(FloatSize size) |
| 769 | { |
| 770 | auto oldSize = m_naturalSize; |
| 771 | m_naturalSize = size; |
| 772 | if (oldSize != m_naturalSize) { |
| 773 | INFO_LOG(LOGIDENTIFIER, "was ", oldSize, ", is ", size); |
| 774 | if (RefPtr player = m_player.get()) |
| 775 | player->sizeChanged(); |
| 776 | } |
| 777 | } |
| 778 | |
| 779 | void MediaPlayerPrivateWebM::effectiveRateChanged() |
| 780 | { |
| 781 | ALWAYS_LOG(LOGIDENTIFIER, effectiveRate()); |
| 782 | if (RefPtr player = m_player.get()) |
| 783 | player->rateChanged(); |
| 784 | } |
| 785 | |
| 786 | void MediaPlayerPrivateWebM::setHasAudio(bool hasAudio) |
| 787 | { |
| 788 | if (hasAudio == m_hasAudio) |
| 789 | return; |
| 790 | |
| 791 | m_hasAudio = hasAudio; |
| 792 | characteristicsChanged(); |
| 793 | } |
| 794 | |
| 795 | void MediaPlayerPrivateWebM::setHasVideo(bool hasVideo) |
| 796 | { |
| 797 | if (hasVideo == m_hasVideo) |
| 798 | return; |
| 799 | |
| 800 | m_hasVideo = hasVideo; |
| 801 | characteristicsChanged(); |
| 802 | } |
| 803 | |
| 804 | void MediaPlayerPrivateWebM::setHasAvailableVideoFrame(bool hasAvailableVideoFrame) |
| 805 | { |
| 806 | if (m_hasAvailableVideoFrame == hasAvailableVideoFrame) |
| 807 | return; |
| 808 | |
| 809 | ALWAYS_LOG(LOGIDENTIFIER, hasAvailableVideoFrame); |
| 810 | m_hasAvailableVideoFrame = hasAvailableVideoFrame; |
| 811 | |
| 812 | if (!m_hasAvailableVideoFrame) |
| 813 | return; |
| 814 | |
| 815 | if (RefPtr player = m_player.get()) |
| 816 | player->firstVideoFrameAvailable(); |
| 817 | |
| 818 | if (m_readyState <= MediaPlayer::ReadyState::HaveMetadata) { |
| 819 | setReadyState(MediaPlayer::ReadyState::HaveCurrentData); |
| 820 | return; |
| 821 | } |
| 822 | |
| 823 | if (!m_readyStateIsWaitingForAvailableFrame) |
| 824 | return; |
| 825 | |
| 826 | m_readyStateIsWaitingForAvailableFrame = false; |
| 827 | if (RefPtr player = m_player.get()) |
| 828 | player->readyStateChanged(); |
| 829 | } |
| 830 | |
| 831 | void MediaPlayerPrivateWebM::setDuration(MediaTime duration) |
| 832 | { |
| 833 | if (duration == m_duration) |
| 834 | return; |
| 835 | |
| 836 | m_renderer->notifyTimeReachedAndStall(duration, [weakThis = ThreadSafeWeakPtr { *this }](const MediaTime&) { |
| 837 | ensureOnMainThread([weakThis] { |
| 838 | if (RefPtr protectedThis = weakThis.get()) { |
| 839 | protectedThis->m_renderer->pause(); |
| 840 | if (RefPtr player = protectedThis->m_player.get()) |
| 841 | player->timeChanged(); |
| 842 | } |
| 843 | }); |
| 844 | }); |
| 845 | |
| 846 | m_duration = WTF::move(duration); |
| 847 | if (RefPtr player = m_player.get()) |
| 848 | player->durationChanged(); |
| 849 | |
| 850 | monitorReadyState(); |
| 851 | } |
| 852 | |
| 853 | void MediaPlayerPrivateWebM::setNetworkState(MediaPlayer::NetworkState state) |
| 854 | { |
| 855 | if (state == m_networkState) |
| 856 | return; |
| 857 | |
| 858 | ALWAYS_LOG(LOGIDENTIFIER, state); |
| 859 | m_networkState = state; |
| 860 | if (RefPtr player = m_player.get()) |
| 861 | player->networkStateChanged(); |
| 862 | } |
| 863 | |
| 864 | void MediaPlayerPrivateWebM::setReadyState(MediaPlayer::ReadyState state) |
| 865 | { |
| 866 | if (state == m_readyState) |
| 867 | return; |
| 868 | |
| 869 | m_readyState = state; |
| 870 | bool waitingOnAvailableFrame = m_readyState >= MediaPlayer::ReadyState::HaveCurrentData && hasVideo() && !m_hasAvailableVideoFrame; |
| 871 | ALWAYS_LOG(LOGIDENTIFIER, state, " waitingOnAvailableVideoFrame: ", waitingOnAvailableFrame); |
| 872 | |
| 873 | m_readyStateIsWaitingForAvailableFrame = waitingOnAvailableFrame; |
| 874 | if (waitingOnAvailableFrame) |
| 875 | return; |
| 876 | |
| 877 | if (RefPtr player = m_player.get()) |
| 878 | player->readyStateChanged(); |
| 879 | } |
| 880 | |
| 881 | void MediaPlayerPrivateWebM::characteristicsChanged() |
| 882 | { |
| 883 | if (RefPtr player = m_player.get()) |
| 884 | player->characteristicChanged(); |
| 885 | } |
| 886 | |
| 887 | void MediaPlayerPrivateWebM::setPreservesPitch(bool preservesPitch) |
| 888 | { |
| 889 | ALWAYS_LOG(LOGIDENTIFIER, preservesPitch); |
| 890 | if (RefPtr player = m_player.get()) |
| 891 | m_renderer->setPreservesPitchAndCorrectionAlgorithm(preservesPitch, player->pitchCorrectionAlgorithm()); |
| 892 | } |
| 893 | |
| 894 | void MediaPlayerPrivateWebM::setPresentationSize(const IntSize& newSize) |
| 895 | { |
| 896 | m_renderer->setPresentationSize(newSize); |
| 897 | } |
| 898 | |
| 899 | void MediaPlayerPrivateWebM::acceleratedRenderingStateChanged() |
| 900 | { |
| 901 | RefPtr player = m_player.get(); |
| 902 | m_renderer->renderingCanBeAcceleratedChanged(player ? player->renderingCanBeAccelerated() : false); |
| 903 | } |
| 904 | |
| 905 | RetainPtr<PlatformLayer> MediaPlayerPrivateWebM::createVideoFullscreenLayer() |
| 906 | { |
| 907 | return adoptNS([[CALayer alloc] init]); |
| 908 | } |
| 909 | |
| 910 | void MediaPlayerPrivateWebM::setVideoFullscreenLayer(PlatformLayer *videoFullscreenLayer, WTF::Function<void()>&& completionHandler) |
| 911 | { |
| 912 | m_renderer->setVideoFullscreenLayer(videoFullscreenLayer, WTF::move(completionHandler)); |
| 913 | } |
| 914 | |
| 915 | void MediaPlayerPrivateWebM::setVideoFullscreenFrame(const FloatRect& frame) |
| 916 | { |
| 917 | m_renderer->setVideoFullscreenFrame(frame); |
| 918 | } |
| 919 | |
| 920 | void MediaPlayerPrivateWebM::syncTextTrackBounds() |
| 921 | { |
| 922 | m_renderer->syncTextTrackBounds(); |
| 923 | } |
| 924 | |
| 925 | void MediaPlayerPrivateWebM::setTextTrackRepresentation(TextTrackRepresentation* representation) |
| 926 | { |
| 927 | m_renderer->setTextTrackRepresentation(representation); |
| 928 | } |
| 929 | |
| 930 | String MediaPlayerPrivateWebM::engineDescription() const |
| 931 | { |
| 932 | static NeverDestroyed<String> description(MAKE_STATIC_STRING_IMPL("Cocoa WebM Engine")); |
| 933 | return description; |
| 934 | } |
| 935 | |
| 936 | #if ENABLE(WIRELESS_PLAYBACK_TARGET) |
| 937 | void MediaPlayerPrivateWebM::setWirelessPlaybackTarget(Ref<MediaPlaybackTarget>&& target) |
| 938 | { |
| 939 | ALWAYS_LOG(LOGIDENTIFIER); |
| 940 | m_playbackTarget = WTF::move(target); |
| 941 | } |
| 942 | |
| 943 | void MediaPlayerPrivateWebM::setShouldPlayToPlaybackTarget(bool shouldPlayToTarget) |
| 944 | { |
| 945 | if (shouldPlayToTarget == m_shouldPlayToTarget) |
| 946 | return; |
| 947 | |
| 948 | ALWAYS_LOG(LOGIDENTIFIER, shouldPlayToTarget); |
| 949 | m_shouldPlayToTarget = shouldPlayToTarget; |
| 950 | |
| 951 | if (RefPtr player = m_player.get()) |
| 952 | player->currentPlaybackTargetIsWirelessChanged(isCurrentPlaybackTargetWireless()); |
| 953 | } |
| 954 | |
| 955 | bool MediaPlayerPrivateWebM::isCurrentPlaybackTargetWireless() const |
| 956 | { |
| 957 | RefPtr playbackTarget = m_playbackTarget; |
| 958 | if (!playbackTarget) |
| 959 | return false; |
| 960 | |
| 961 | auto hasTarget = m_shouldPlayToTarget && playbackTarget->hasActiveRoute(); |
| 962 | INFO_LOG(LOGIDENTIFIER, hasTarget); |
| 963 | return hasTarget; |
| 964 | } |
| 965 | #endif |
| 966 | |
| 967 | void MediaPlayerPrivateWebM::enqueueSample(Ref<MediaSample>&& sample, TrackID trackId) |
| 968 | { |
| 969 | auto logSiteIdentifier = LOGIDENTIFIER; |
| 970 | DEBUG_LOG(logSiteIdentifier, "track ID = ", trackId, ", sample = ", sample.get()); |
| 971 | |
| 972 | PlatformSample platformSample = sample->platformSample(); |
| 973 | |
| 974 | CMFormatDescriptionRef formatDescription = PAL::CMSampleBufferGetFormatDescription(platformSample.cmSampleBuffer()); |
| 975 | ASSERT(formatDescription); |
| 976 | if (!formatDescription) { |
| 977 | ERROR_LOG(logSiteIdentifier, "Received sample with a null formatDescription. Bailing."); |
| 978 | return; |
| 979 | } |
| 980 | auto mediaType = PAL::CMFormatDescriptionGetMediaType(formatDescription); |
| 981 | |
| 982 | if (isEnabledVideoTrackID(trackId)) { |
| 983 | // AVSampleBufferDisplayLayer will throw an un-documented exception if passed a sample |
| 984 | // whose media type is not kCMMediaType_Video. This condition is exceptional; we should |
| 985 | // never enqueue a non-video sample in an AVSampleBufferDisplayLayer. |
| 986 | ASSERT(mediaType == kCMMediaType_Video); |
| 987 | if (mediaType != kCMMediaType_Video) { |
| 988 | ERROR_LOG(logSiteIdentifier, "Expected sample of type '", FourCC(kCMMediaType_Video), "', got '", FourCC(mediaType), "'. Bailing."); |
| 989 | return; |
| 990 | } |
| 991 | m_renderer->enqueueSample(trackIdentifierFor(trackId), WTF::move(sample)); |
| 992 | return; |
| 993 | } |
| 994 | // AVSampleBufferAudioRenderer will throw an un-documented exception if passed a sample |
| 995 | // whose media type is not kCMMediaType_Audio. This condition is exceptional; we should |
| 996 | // never enqueue a non-audio sample in an AVSampleBufferAudioRenderer. |
| 997 | ASSERT(mediaType == kCMMediaType_Audio); |
| 998 | if (mediaType != kCMMediaType_Audio) { |
| 999 | ERROR_LOG(logSiteIdentifier, "Expected sample of type '", FourCC(kCMMediaType_Audio), "', got '", FourCC(mediaType), "'. Bailing."); |
| 1000 | return; |
| 1001 | } |
| 1002 | |
| 1003 | if (m_readyState < MediaPlayer::ReadyState::HaveEnoughData && !m_enabledVideoTrackID) |
| 1004 | setReadyState(MediaPlayer::ReadyState::HaveEnoughData); |
| 1005 | |
| 1006 | m_renderer->enqueueSample(trackIdentifierFor(trackId), WTF::move(sample)); |
| 1007 | } |
| 1008 | |
| 1009 | void MediaPlayerPrivateWebM::reenqueSamples(TrackID trackId, NeedsFlush needsFlush) |
| 1010 | { |
| 1011 | auto it = m_trackBufferMap.find(trackId); |
| 1012 | if (it == m_trackBufferMap.end()) |
| 1013 | return; |
| 1014 | TrackBuffer& trackBuffer = it->second; |
| 1015 | trackBuffer.setNeedsReenqueueing(true); |
| 1016 | reenqueueMediaForTime(trackBuffer, trackId, currentTime(), needsFlush); |
| 1017 | } |
| 1018 | |
| 1019 | void MediaPlayerPrivateWebM::reenqueueMediaForTime(const MediaTime& time) |
| 1020 | { |
| 1021 | for (auto& trackBufferPair : m_trackBufferMap) { |
| 1022 | TrackBuffer& trackBuffer = trackBufferPair.second; |
| 1023 | auto trackId = trackBufferPair.first; |
| 1024 | reenqueueMediaForTime(trackBuffer, trackId, time, NeedsFlush::No); |
| 1025 | } |
| 1026 | } |
| 1027 | |
| 1028 | void MediaPlayerPrivateWebM::reenqueueMediaForTime(TrackBuffer& trackBuffer, TrackID trackId, const MediaTime& time, NeedsFlush needsFlush) |
| 1029 | { |
| 1030 | if (needsFlush == NeedsFlush::Yes) |
| 1031 | m_renderer->flushTrack(trackIdentifierFor(trackId)); |
| 1032 | |
| 1033 | if (trackBuffer.reenqueueMediaForTime(time, timeFudgeFactor(), m_loadFinished)) |
| 1034 | provideMediaData(trackBuffer, trackId); |
| 1035 | } |
| 1036 | |
| 1037 | void MediaPlayerPrivateWebM::notifyClientWhenReadyForMoreSamples(TrackID trackId) |
| 1038 | { |
| 1039 | if (m_requestReadyForMoreSamplesSetMap[trackId]) |
| 1040 | return; |
| 1041 | m_requestReadyForMoreSamplesSetMap[trackId] = true; |
| 1042 | |
| 1043 | auto trackIdentifier = maybeTrackIdentifierFor(trackId); |
| 1044 | if (!trackIdentifier) |
| 1045 | return; // track hasn't been enabled yet. |
| 1046 | m_renderer->requestMediaDataWhenReady(*trackIdentifier)->whenSettled(RunLoop::mainSingleton(), [weakThis = ThreadSafeWeakPtr { *this }, trackId](auto&& result) { |
| 1047 | if (RefPtr protectedThis = weakThis.get(); protectedThis && result) |
| 1048 | protectedThis->didBecomeReadyForMoreSamples(trackId); |
| 1049 | }); |
| 1050 | } |
| 1051 | |
| 1052 | bool MediaPlayerPrivateWebM::isReadyForMoreSamples(TrackID trackId) |
| 1053 | { |
| 1054 | auto trackIdentifier = maybeTrackIdentifierFor(trackId); |
| 1055 | return trackIdentifier && m_renderer->isReadyForMoreSamples(*trackIdentifier); |
| 1056 | } |
| 1057 | |
| 1058 | void MediaPlayerPrivateWebM::didBecomeReadyForMoreSamples(TrackID trackId) |
| 1059 | { |
| 1060 | INFO_LOG(LOGIDENTIFIER, trackId); |
| 1061 | |
| 1062 | m_requestReadyForMoreSamplesSetMap[trackId] = false; |
| 1063 | |
| 1064 | provideMediaData(trackId); |
| 1065 | } |
| 1066 | |
| 1067 | void MediaPlayerPrivateWebM::appendCompleted(bool success) |
| 1068 | { |
| 1069 | assertIsMainThread(); |
| 1070 | |
| 1071 | ASSERT(m_pendingAppends > 0); |
| 1072 | m_pendingAppends--; |
| 1073 | INFO_LOG(LOGIDENTIFIER, "pending appends = ", m_pendingAppends, " success = ", success); |
| 1074 | setLoadingProgresssed(true); |
| 1075 | m_errored |= !success; |
| 1076 | if (!m_errored) |
| 1077 | updateBufferedFromTrackBuffers(m_loadFinished && !m_pendingAppends); |
| 1078 | |
| 1079 | if (m_waitForTimeBufferedPromise && m_buffered.containWithEpsilon(m_lastSeekTime, timeFudgeFactor())) { |
| 1080 | ALWAYS_LOG(LOGIDENTIFIER, "can continue seeking data is now buffered"); |
| 1081 | m_waitForTimeBufferedPromise->resolve(); |
| 1082 | m_waitForTimeBufferedPromise.reset(); |
| 1083 | } |
| 1084 | maybeFinishLoading(); |
| 1085 | } |
| 1086 | |
| 1087 | void MediaPlayerPrivateWebM::maybeFinishLoading() |
| 1088 | { |
| 1089 | if (m_loadFinished && !m_pendingAppends) { |
| 1090 | if (!m_hasVideo && !m_hasAudio) { |
| 1091 | ERROR_LOG(LOGIDENTIFIER, "could not load audio or video tracks"); |
| 1092 | setNetworkState(MediaPlayer::NetworkState::FormatError); |
| 1093 | setReadyState(MediaPlayer::ReadyState::HaveNothing); |
| 1094 | return; |
| 1095 | } |
| 1096 | if (m_errored) { |
| 1097 | ERROR_LOG(LOGIDENTIFIER, "parsing error"); |
| 1098 | setNetworkState(m_readyState >= MediaPlayer::ReadyState::HaveMetadata ? MediaPlayer::NetworkState::DecodeError : MediaPlayer::NetworkState::FormatError); |
| 1099 | return; |
| 1100 | } |
| 1101 | setNetworkState(MediaPlayer::NetworkState::Idle); |
| 1102 | |
| 1103 | updateDurationFromTrackBuffers(); |
| 1104 | } |
| 1105 | } |
| 1106 | |
| 1107 | void MediaPlayerPrivateWebM::provideMediaData(TrackID trackId) |
| 1108 | { |
| 1109 | if (auto it = m_trackBufferMap.find(trackId); it != m_trackBufferMap.end()) |
| 1110 | provideMediaData(it->second, trackId); |
| 1111 | } |
| 1112 | |
| 1113 | void MediaPlayerPrivateWebM::provideMediaData(TrackBuffer& trackBuffer, TrackID trackId) |
| 1114 | { |
| 1115 | if (m_errored) |
| 1116 | return; |
| 1117 | |
| 1118 | if (trackBuffer.needsReenqueueing()) |
| 1119 | return; |
| 1120 | if (isEnabledVideoTrackID(trackId) && m_layerRequiresFlush) |
| 1121 | return; |
| 1122 | |
| 1123 | unsigned enqueuedSamples = 0; |
| 1124 | |
| 1125 | while (true) { |
| 1126 | if (!isReadyForMoreSamples(trackId)) { |
| 1127 | DEBUG_LOG(LOGIDENTIFIER, "bailing early, track id ", trackId, " is not ready for more data"); |
| 1128 | notifyClientWhenReadyForMoreSamples(trackId); |
| 1129 | break; |
| 1130 | } |
| 1131 | |
| 1132 | RefPtr sample = trackBuffer.nextSample(); |
| 1133 | if (!sample) |
| 1134 | break; |
| 1135 | enqueueSample(sample.releaseNonNull(), trackId); |
| 1136 | ++enqueuedSamples; |
| 1137 | } |
| 1138 | |
| 1139 | DEBUG_LOG(LOGIDENTIFIER, "enqueued ", enqueuedSamples, " samples, ", trackBuffer.remainingSamples(), " remaining"); |
| 1140 | } |
| 1141 | |
| 1142 | void MediaPlayerPrivateWebM::trackDidChangeSelected(VideoTrackPrivate& track, bool selected) |
| 1143 | { |
| 1144 | auto trackId = track.id(); |
| 1145 | |
| 1146 | if (!m_trackBufferMap.contains(trackId)) |
| 1147 | return; |
| 1148 | |
| 1149 | ALWAYS_LOG(LOGIDENTIFIER, "video trackID = ", trackId, ", selected = ", selected); |
| 1150 | |
| 1151 | if (selected) { |
| 1152 | m_enabledVideoTrackID = trackId; |
| 1153 | m_readyForMoreSamplesMap[trackId] = true; |
| 1154 | m_trackIdentifiers.emplace(trackId, m_renderer->addTrack(TrackType::Video)); |
| 1155 | return; |
| 1156 | } |
| 1157 | |
| 1158 | if (!isEnabledVideoTrackID(trackId)) |
| 1159 | return; |
| 1160 | |
| 1161 | m_enabledVideoTrackID.reset(); |
| 1162 | m_renderer->removeTrack(trackIdentifierFor(trackId)); |
| 1163 | m_trackIdentifiers.erase(trackId); |
| 1164 | m_readyForMoreSamplesMap.erase(trackId); |
| 1165 | } |
| 1166 | |
| 1167 | void MediaPlayerPrivateWebM::trackDidChangeEnabled(AudioTrackPrivate& track, bool enabled) |
| 1168 | { |
| 1169 | auto trackId = track.id(); |
| 1170 | |
| 1171 | if (!m_trackBufferMap.contains(trackId)) |
| 1172 | return; |
| 1173 | |
| 1174 | ALWAYS_LOG(LOGIDENTIFIER, "audio trackID = ", trackId, ", enabled = ", enabled); |
| 1175 | |
| 1176 | if (enabled) { |
| 1177 | auto trackIdentifier = m_renderer->addTrack(TrackType::Audio); |
| 1178 | m_trackIdentifiers.emplace(trackId, trackIdentifier); |
| 1179 | if (!m_errored) { |
| 1180 | m_readyForMoreSamplesMap[trackId] = true; |
| 1181 | characteristicsChanged(); |
| 1182 | } |
| 1183 | m_renderer->notifyTrackNeedsReenqueuing(trackIdentifier, [weakThis = ThreadSafeWeakPtr { *this }, trackId](TrackIdentifier, const MediaTime&) { |
| 1184 | ensureOnMainThread([weakThis, trackId] { |
| 1185 | if (RefPtr protectedThis = weakThis.get()) |
| 1186 | protectedThis->reenqueSamples(trackId, NeedsFlush::No); |
| 1187 | }); |
| 1188 | }); |
| 1189 | return; |
| 1190 | } |
| 1191 | |
| 1192 | m_renderer->removeTrack(trackIdentifierFor(trackId)); |
| 1193 | m_trackIdentifiers.erase(trackId); |
| 1194 | m_readyForMoreSamplesMap.erase(trackId); |
| 1195 | } |
| 1196 | |
| 1197 | void MediaPlayerPrivateWebM::didParseInitializationData(InitializationSegment&& segment) |
| 1198 | { |
| 1199 | ALWAYS_LOG(LOGIDENTIFIER); |
| 1200 | |
| 1201 | if (m_preload == MediaPlayer::Preload::MetaData && !m_loadFinished) |
| 1202 | cancelLoad(); |
| 1203 | |
| 1204 | clearTracks(); |
| 1205 | |
| 1206 | RefPtr player = m_player.get(); |
| 1207 | for (auto videoTrackInfo : segment.videoTracks) { |
| 1208 | if (videoTrackInfo.track) { |
| 1209 | // FIXME: Use downcast instead. |
| 1210 | auto track = unsafeRefPtrDowncast<VideoTrackPrivateWebM>(videoTrackInfo.track); |
| 1211 | #if PLATFORM(IOS_FAMILY) |
| 1212 | if (shouldCheckHardwareSupport() && (videoTrackInfo.description->codec() == "vp8"_s || (videoTrackInfo.description->codec() == "vp9"_s && !vp9HardwareDecoderAvailable()))) { |
| 1213 | m_errored = true; |
| 1214 | return; |
| 1215 | } |
| 1216 | #endif |
| 1217 | addTrackBuffer(track->id(), WTF::move(videoTrackInfo.description)); |
| 1218 | |
| 1219 | track->setSelectedChangedCallback([weakThis = ThreadSafeWeakPtr { *this }] (VideoTrackPrivate& track, bool selected) { |
| 1220 | RefPtr protectedThis = weakThis.get(); |
| 1221 | if (!protectedThis) |
| 1222 | return; |
| 1223 | |
| 1224 | auto videoTrackSelectedChanged = [weakThis, trackRef = Ref { track }, selected] { |
| 1225 | if (RefPtr protectedThis = weakThis.get()) |
| 1226 | protectedThis->trackDidChangeSelected(trackRef, selected); |
| 1227 | }; |
| 1228 | |
| 1229 | if (!protectedThis->m_processingInitializationSegment) { |
| 1230 | videoTrackSelectedChanged(); |
| 1231 | return; |
| 1232 | } |
| 1233 | }); |
| 1234 | |
| 1235 | if (m_videoTracks.isEmpty()) { |
| 1236 | setNaturalSize({ float(track->width()), float(track->height()) }); |
| 1237 | track->setSelected(true); |
| 1238 | } |
| 1239 | |
| 1240 | m_videoTracks.append(track); |
| 1241 | if (player) |
| 1242 | player->addVideoTrack(*track); |
| 1243 | } |
| 1244 | } |
| 1245 | |
| 1246 | for (auto audioTrackInfo : segment.audioTracks) { |
| 1247 | if (audioTrackInfo.track) { |
| 1248 | // FIXME: Use downcast instead. |
| 1249 | auto track = unsafeRefPtrDowncast<AudioTrackPrivateWebM>(audioTrackInfo.track); |
| 1250 | addTrackBuffer(track->id(), WTF::move(audioTrackInfo.description)); |
| 1251 | |
| 1252 | track->setEnabledChangedCallback([weakThis = ThreadSafeWeakPtr { *this }] (AudioTrackPrivate& track, bool enabled) { |
| 1253 | RefPtr protectedThis = weakThis.get(); |
| 1254 | if (!protectedThis) |
| 1255 | return; |
| 1256 | |
| 1257 | auto audioTrackEnabledChanged = [weakThis, trackRef = Ref { track }, enabled] { |
| 1258 | if (RefPtr protectedThis = weakThis.get()) |
| 1259 | protectedThis->trackDidChangeEnabled(trackRef, enabled); |
| 1260 | }; |
| 1261 | |
| 1262 | if (!protectedThis->m_processingInitializationSegment) { |
| 1263 | audioTrackEnabledChanged(); |
| 1264 | return; |
| 1265 | } |
| 1266 | }); |
| 1267 | |
| 1268 | if (m_audioTracks.isEmpty()) |
| 1269 | track->setEnabled(true); |
| 1270 | |
| 1271 | m_audioTracks.append(track); |
| 1272 | if (player) |
| 1273 | player->addAudioTrack(*track); |
| 1274 | } |
| 1275 | } |
| 1276 | |
| 1277 | setReadyState(MediaPlayer::ReadyState::HaveMetadata); |
| 1278 | |
| 1279 | if (segment.duration.isValid()) |
| 1280 | setDuration(WTF::move(segment.duration)); |
| 1281 | else |
| 1282 | setDuration(MediaTime::positiveInfiniteTime()); |
| 1283 | } |
| 1284 | |
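|      | // Buffers a parsed media sample for its track. With preload <= MetaData the sample is only stored;
|      | // otherwise enqueueing is requested unless a seek is in progress or a layer flush is pending.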
| 1285 | void MediaPlayerPrivateWebM::didProvideMediaDataForTrackId(Ref<MediaSampleAVFObjC>&& sample, TrackID trackId, const String& mediaType) |
| 1286 | { |
| 1287 | UNUSED_PARAM(mediaType);
| 1288 | |
| 1289 | auto it = m_trackBufferMap.find(trackId); |
| 1290 | if (it == m_trackBufferMap.end()) |
| 1291 | return; |
| 1292 | TrackBuffer& trackBuffer = it->second; |
| 1293 | |
| 1294 | trackBuffer.addSample(sample); |
| 1295 | |
| 1296 | if (m_preload <= MediaPlayer::Preload::MetaData) { |
| 1297 | m_readyForMoreSamplesMap[trackId] = true; |
| 1298 | return; |
| 1299 | } |
| 1300 | if (m_seeking || m_layerRequiresFlush) |
| 1301 | return; |
| 1302 | notifyClientWhenReadyForMoreSamples(trackId); |
| 1303 | } |
| 1304 | |
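|      | // After the renderer is flushed, previously enqueued samples must be sent again, so clear the
|      | // available-video-frame flag and mark every track buffer as needing re-enqueueing.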
| 1305 | void MediaPlayerPrivateWebM::flush() |
| 1306 | { |
| 1307 | m_renderer->flush(); |
| 1308 | setHasAvailableVideoFrame(false); |
| 1309 | setAllTracksForReenqueuing(); |
| 1310 | } |
| 1311 | |
| 1312 | void MediaPlayerPrivateWebM::setAllTracksForReenqueuing() |
| 1313 | { |
| 1314 | for (auto& trackBufferPair : m_trackBufferMap) { |
| 1315 | TrackBuffer& trackBuffer = trackBufferPair.second; |
| 1316 | trackBuffer.setNeedsReenqueueing(true); |
| 1317 | } |
| 1318 | } |
| 1319 | |
| 1320 | void MediaPlayerPrivateWebM::setTrackForReenqueuing(TrackID trackId) |
| 1321 | { |
| 1322 | if (auto it = m_trackBufferMap.find(trackId); it != m_trackBufferMap.end()) { |
| 1323 | TrackBuffer& trackBuffer = it->second; |
| 1324 | trackBuffer.setNeedsReenqueueing(true); |
| 1325 | } |
| 1326 | } |
| 1327 | |
| 1328 | void MediaPlayerPrivateWebM::flushVideoIfNeeded() |
| 1329 | { |
| 1330 | ALWAYS_LOG(LOGIDENTIFIER, "layerRequiresFlush: ", m_layerRequiresFlush)Ref { logger() }->logAlways(logChannel(), WTF::Logger::LogSiteIdentifier (logClassName(), __func__, logIdentifier()), "layerRequiresFlush: " , m_layerRequiresFlush); |
| 1331 | if (!m_layerRequiresFlush) |
| 1332 | return; |
| 1333 | |
| 1334 | m_layerRequiresFlush = false; |
| 1335 | |
| 1336 | if (m_enabledVideoTrackID) |
| 1337 | reenqueSamples(*m_enabledVideoTrackID, NeedsFlush::Yes); |
| 1338 | } |
| 1339 | |
| 1340 | void MediaPlayerPrivateWebM::addTrackBuffer(TrackID trackId, RefPtr<MediaDescription>&& description) |
| 1341 | { |
| 1342 | ASSERT(!m_trackBufferMap.contains(trackId));
| 1343 | |
| 1344 | setHasAudio(m_hasAudio || description->isAudio()); |
| 1345 | setHasVideo(m_hasVideo || description->isVideo()); |
| 1346 | |
| 1347 | auto trackBuffer = TrackBuffer::create(WTF::move(description), discontinuityTolerance); |
| 1348 | trackBuffer->setLogger(protectedLogger(), logIdentifier()); |
| 1349 | m_trackBufferMap.try_emplace(trackId, WTF::move(trackBuffer)); |
| 1350 | m_requestReadyForMoreSamplesSetMap[trackId] = false; |
| 1351 | } |
| 1352 | |
| 1353 | void MediaPlayerPrivateWebM::clearTracks() |
| 1354 | { |
| 1355 | RefPtr player = m_player.get(); |
| 1356 | for (auto& track : m_videoTracks) { |
| 1357 | track->setSelectedChangedCallback(nullptr); |
| 1358 | if (player) |
| 1359 | player->removeVideoTrack(*track); |
| 1360 | } |
| 1361 | m_videoTracks.clear(); |
| 1362 | |
| 1363 | for (auto& track : m_audioTracks) { |
| 1364 | track->setEnabledChangedCallback(nullptr); |
| 1365 | if (player) |
| 1366 | player->removeAudioTrack(*track); |
| 1367 | } |
| 1368 | m_audioTracks.clear(); |
| 1369 | } |
| 1370 | |
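|      | // Video frame metadata gathering hooks the renderer's available-video-frame notification and bounces
|      | // to the main thread so checkNewVideoFrameMetadata() can snapshot per-frame metadata.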
| 1371 | void MediaPlayerPrivateWebM::startVideoFrameMetadataGathering() |
| 1372 | { |
| 1373 | m_isGatheringVideoFrameMetadata = true; |
| 1374 | m_renderer->notifyWhenHasAvailableVideoFrame([weakThis = ThreadSafeWeakPtr { *this }](const MediaTime& presentationTime, double displayTime) { |
| 1375 | ensureOnMainThread([weakThis, presentationTime, displayTime] { |
| 1376 | if (RefPtr protectedThis = weakThis.get()) |
| 1377 | protectedThis->checkNewVideoFrameMetadata(presentationTime, displayTime); |
| 1378 | }); |
| 1379 | }); |
| 1380 | } |
| 1381 | |
| 1382 | void MediaPlayerPrivateWebM::stopVideoFrameMetadataGathering() |
| 1383 | { |
| 1384 | m_isGatheringVideoFrameMetadata = false; |
| 1385 | m_videoFrameMetadata = { }; |
| 1386 | m_renderer->notifyWhenHasAvailableVideoFrame(nullptr); |
| 1387 | } |
| 1388 | |
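|      | // Pulls the most recently decoded frame and packages VideoFrameMetadata (natural size, composited
|      | // frame count from the playback quality metrics, display and media timestamps) for the player client.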
| 1389 | void MediaPlayerPrivateWebM::checkNewVideoFrameMetadata(const MediaTime& presentationTime, double displayTime) |
| 1390 | { |
| 1391 | RefPtr player = m_player.get(); |
| 1392 | if (!player) |
| 1393 | return; |
| 1394 | |
| 1395 | if (!updateLastVideoFrame()) |
| 1396 | return; |
| 1397 | |
| 1398 | Ref lastVideoFrame = *m_lastVideoFrame; |
| 1399 | |
| 1400 | #ifndef NDEBUG
| 1401 | if (lastVideoFrame->presentationTime() != presentationTime) |
| 1402 | ALWAYS_LOG(LOGIDENTIFIER, "notification of new frame delayed retrieved:", m_lastVideoFrame->presentationTime(), " expected:", presentationTime)Ref { logger() }->logAlways(logChannel(), WTF::Logger::LogSiteIdentifier (logClassName(), __func__, logIdentifier()), "notification of new frame delayed retrieved:" , m_lastVideoFrame->presentationTime(), " expected:", presentationTime ); |
| 1403 | #else |
| 1404 | UNUSED_PARAM(presentationTime);
| 1405 | #endif |
| 1406 | VideoFrameMetadata metadata; |
| 1407 | metadata.width = m_naturalSize.width(); |
| 1408 | metadata.height = m_naturalSize.height(); |
| 1409 | auto metrics = m_renderer->videoPlaybackQualityMetrics(); |
| 1410 | metadata.presentedFrames = metrics ? metrics->displayCompositedVideoFrames : 0; |
| 1411 | metadata.presentationTime = displayTime; |
| 1412 | metadata.expectedDisplayTime = displayTime; |
| 1413 | metadata.mediaTime = lastVideoFrame->presentationTime().toDouble(); |
| 1414 | |
| 1415 | m_videoFrameMetadata = metadata; |
| 1416 | player->onNewVideoFrameMetadata(WTF::move(metadata), lastVideoFrame->pixelBuffer()); |
| 1417 | } |
| 1418 | |
| 1419 | void MediaPlayerPrivateWebM::setResourceOwner(const ProcessIdentity& resourceOwner) |
| 1420 | { |
| 1421 | m_renderer->setResourceOwner(resourceOwner); |
| 1422 | } |
| 1423 | |
| 1424 | WTFLogChannel& MediaPlayerPrivateWebM::logChannel() const |
| 1425 | { |
| 1426 | return LogMedia; |
| 1427 | } |
| 1428 | |
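|      | // Factory registered with MediaPlayer so the engine selector can create MediaPlayerPrivateWebM
|      | // instances and query the container/codec types this engine supports.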
| 1429 | class MediaPlayerFactoryWebM final : public MediaPlayerFactory { |
| 1430 | WTF_MAKE_TZONE_ALLOCATED_INLINE(MediaPlayerFactoryWebM);
| 1431 | WTF_OVERRIDE_DELETE_FOR_CHECKED_PTR(MediaPlayerFactoryWebM);
| 1432 | private: |
| 1433 | MediaPlayerEnums::MediaEngineIdentifier identifier() const final { return MediaPlayerEnums::MediaEngineIdentifier::CocoaWebM; }
| 1434 | |
| 1435 | Ref<MediaPlayerPrivateInterface> createMediaEnginePlayer(MediaPlayer& player) const final |
| 1436 | { |
| 1437 | return MediaPlayerPrivateWebM::create(player); |
| 1438 | } |
| 1439 | |
| 1440 | void getSupportedTypes(HashSet<String>& types) const final |
| 1441 | { |
| 1442 | return MediaPlayerPrivateWebM::getSupportedTypes(types); |
| 1443 | } |
| 1444 | |
| 1445 | MediaPlayer::SupportsType supportsTypeAndCodecs(const MediaEngineSupportParameters& parameters) const final |
| 1446 | { |
| 1447 | return MediaPlayerPrivateWebM::supportsType(parameters); |
| 1448 | } |
| 1449 | }; |
| 1450 | |
| 1451 | void MediaPlayerPrivateWebM::registerMediaEngine(MediaEngineRegistrar registrar) |
| 1452 | { |
| 1453 | if (!isAvailable()) |
| 1454 | return; |
| 1455 | |
| 1456 | registrar(makeUnique<MediaPlayerFactoryWebM>()); |
| 1457 | } |
| 1458 | |
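|      | // The engine is available only when the WebM parser is usable and the soft-linked AVFoundation /
|      | // CoreMedia classes are present, including an AVSampleBufferAudioRenderer that responds to -setMuted:.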
| 1459 | bool MediaPlayerPrivateWebM::isAvailable() |
| 1460 | { |
| 1461 | return SourceBufferParserWebM::isAvailable() |
| 1462 | && PAL::isAVFoundationFrameworkAvailable() |
| 1463 | && PAL::isCoreMediaFrameworkAvailable() |
| 1464 | && PAL::getAVSampleBufferAudioRendererClassSingleton() |
| 1465 | && PAL::getAVSampleBufferRenderSynchronizerClassSingleton() |
| 1466 | && class_getInstanceMethod(PAL::getAVSampleBufferAudioRendererClassSingleton(), @selector(setMuted:)); |
| 1467 | } |
| 1468 | |
| 1469 | bool MediaPlayerPrivateWebM::isEnabledVideoTrackID(TrackID trackID) const |
| 1470 | { |
| 1471 | return m_enabledVideoTrackID && *m_enabledVideoTrackID == trackID; |
| 1472 | } |
| 1473 | |
| 1474 | bool MediaPlayerPrivateWebM::hasSelectedVideo() const |
| 1475 | { |
| 1476 | return !!m_enabledVideoTrackID; |
| 1477 | } |
| 1478 | |
| 1479 | void MediaPlayerPrivateWebM::setShouldDisableHDR(bool shouldDisable) |
| 1480 | { |
| 1481 | m_renderer->setShouldDisableHDR(shouldDisable); |
| 1482 | } |
| 1483 | |
| 1484 | void MediaPlayerPrivateWebM::setPlatformDynamicRangeLimit(PlatformDynamicRangeLimit platformDynamicRangeLimit) |
| 1485 | { |
| 1486 | m_renderer->setPlatformDynamicRangeLimit(platformDynamicRangeLimit); |
| 1487 | } |
| 1488 | |
| 1489 | void MediaPlayerPrivateWebM::playerContentBoxRectChanged(const LayoutRect& newRect) |
| 1490 | { |
| 1491 | m_renderer->contentBoxRectChanged(newRect); |
| 1492 | } |
| 1493 | |
| 1494 | void MediaPlayerPrivateWebM::setShouldMaintainAspectRatio(bool shouldMaintainAspectRatio) |
| 1495 | { |
| 1496 | m_renderer->setShouldMaintainAspectRatio(shouldMaintainAspectRatio); |
| 1497 | } |
| 1498 | |
| 1499 | #if HAVE(SPATIAL_TRACKING_LABEL)
| 1500 | String MediaPlayerPrivateWebM::defaultSpatialTrackingLabel() const |
| 1501 | { |
| 1502 | return m_defaultSpatialTrackingLabel; |
| 1503 | } |
| 1504 | |
| 1505 | void MediaPlayerPrivateWebM::setDefaultSpatialTrackingLabel(const String& defaultSpatialTrackingLabel) |
| 1506 | { |
| 1507 | if (m_defaultSpatialTrackingLabel == defaultSpatialTrackingLabel) |
| 1508 | return; |
| 1509 | m_defaultSpatialTrackingLabel = defaultSpatialTrackingLabel; |
| 1510 | updateSpatialTrackingLabel(); |
| 1511 | } |
| 1512 | |
| 1513 | String MediaPlayerPrivateWebM::spatialTrackingLabel() const |
| 1514 | { |
| 1515 | return m_spatialTrackingLabel; |
| 1516 | } |
| 1517 | |
| 1518 | void MediaPlayerPrivateWebM::setSpatialTrackingLabel(const String& spatialTrackingLabel) |
| 1519 | { |
| 1520 | if (m_spatialTrackingLabel == spatialTrackingLabel) |
| 1521 | return; |
| 1522 | m_spatialTrackingLabel = spatialTrackingLabel; |
| 1523 | updateSpatialTrackingLabel(); |
| 1524 | } |
| 1525 | |
| 1526 | void MediaPlayerPrivateWebM::updateSpatialTrackingLabel() |
| 1527 | { |
| 1528 | #if HAVE(SPATIAL_AUDIO_EXPERIENCE)
| 1529 | RefPtr player = m_player.get(); |
| 1530 | m_renderer->setSpatialTrackingInfo(player && player->prefersSpatialAudioExperience(), player ? player->soundStageSize() : MediaPlayer::SoundStageSize::Auto, player ? player->sceneIdentifier() : emptyString(), m_defaultSpatialTrackingLabel, m_spatialTrackingLabel); |
| 1531 | #else |
| 1532 | m_renderer->setSpatialTrackingInfo(false, MediaPlayer::SoundStageSize::Auto, { }, m_defaultSpatialTrackingLabel, m_spatialTrackingLabel); |
| 1533 | #endif |
| 1534 | } |
| 1535 | #endif |
| 1536 | |
| 1537 | #if ENABLE(LINEAR_MEDIA_PLAYER)
| 1538 | void MediaPlayerPrivateWebM::setVideoTarget(const PlatformVideoTarget& videoTarget) |
| 1539 | { |
| 1540 | ALWAYS_LOG(LOGIDENTIFIER, !!videoTarget);
| 1541 | m_renderer->setVideoTarget(videoTarget); |
| 1542 | } |
| 1543 | #endif |
| 1544 | |
| 1545 | #if PLATFORM(IOS_FAMILY)
| 1546 | void MediaPlayerPrivateWebM::sceneIdentifierDidChange() |
| 1547 | { |
| 1548 | #if HAVE(SPATIAL_TRACKING_LABEL)
| 1549 | updateSpatialTrackingLabel(); |
| 1550 | #endif |
| 1551 | } |
| 1552 | |
| 1553 | void MediaPlayerPrivateWebM::applicationWillResignActive() |
| 1554 | { |
| 1555 | ALWAYS_LOG(LOGIDENTIFIER);
| 1556 | m_renderer->applicationWillResignActive(); |
| 1557 | m_applicationIsActive = false; |
| 1558 | } |
| 1559 | |
| 1560 | void MediaPlayerPrivateWebM::applicationDidBecomeActive() |
| 1561 | { |
| 1562 | ALWAYS_LOG(LOGIDENTIFIER);
| 1563 | m_applicationIsActive = true; |
| 1564 | flushVideoIfNeeded(); |
| 1565 | } |
| 1566 | #endif |
| 1567 | |
| 1568 | void MediaPlayerPrivateWebM::isInFullscreenOrPictureInPictureChanged(bool isInFullscreenOrPictureInPicture) |
| 1569 | { |
| 1570 | m_renderer->isInFullscreenOrPictureInPictureChanged(isInFullscreenOrPictureInPicture); |
| 1571 | } |
| 1572 | |
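|      | // trackIdentifierFor() assumes the mapping exists (asserts in debug builds); maybeTrackIdentifierFor()
|      | // is the non-asserting variant that returns std::nullopt for unknown tracks.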
| 1573 | AudioVideoRenderer::TrackIdentifier MediaPlayerPrivateWebM::trackIdentifierFor(TrackID trackID) const |
| 1574 | { |
| 1575 | auto it = m_trackIdentifiers.find(trackID); |
| 1576 | ASSERT(it != m_trackIdentifiers.end());
| 1577 | return it->second; |
| 1578 | } |
| 1579 | |
| 1580 | std::optional<AudioVideoRenderer::TrackIdentifier> MediaPlayerPrivateWebM::maybeTrackIdentifierFor(TrackID trackID) const |
| 1581 | { |
| 1582 | if (auto it = m_trackIdentifiers.find(trackID); it != m_trackIdentifiers.end()) |
| 1583 | return it->second; |
| 1584 | return { }; |
| 1585 | } |
| 1586 | |
| 1587 | void MediaPlayerPrivateWebM::setLayerRequiresFlush() |
| 1588 | { |
| 1589 | ALWAYS_LOG(LOGIDENTIFIER);
| 1590 | m_layerRequiresFlush = true; |
| 1591 | #if PLATFORM(IOS_FAMILY)
| 1592 | if (m_applicationIsActive) |
| 1593 | flushVideoIfNeeded(); |
| 1594 | #else |
| 1595 | flushVideoIfNeeded(); |
| 1596 | #endif |
| 1597 | } |
| 1598 | |
| 1599 | std::optional<VideoPlaybackQualityMetrics> MediaPlayerPrivateWebM::videoPlaybackQualityMetrics() |
| 1600 | { |
| 1601 | return m_renderer->videoPlaybackQualityMetrics(); |
| 1602 | } |
| 1603 | |
| 1604 | WebCore::HostingContext MediaPlayerPrivateWebM::hostingContext() const |
| 1605 | { |
| 1606 | return m_renderer->hostingContext(); |
| 1607 | } |
| 1608 | |
| 1609 | void MediaPlayerPrivateWebM::setVideoLayerSizeFenced(const WebCore::FloatSize& size, WTF::MachSendRightAnnotated&& sendRightAnnotated) |
| 1610 | { |
| 1611 | m_renderer->setVideoLayerSizeFenced(size, WTF::move(sendRightAnnotated)); |
| 1612 | } |
| 1613 | |
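|      | // Upgrades the ready state to HaveEnoughData once roughly 3 seconds of data are buffered past the
|      | // current time, otherwise HaveFutureData. The MediaTime(2002, 24000) epsilon (~83 ms, two frame
|      | // durations at 23.976 fps) is presumably there to tolerate small gaps between buffered ranges.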
| 1614 | void MediaPlayerPrivateWebM::monitorReadyState() |
| 1615 | { |
| 1616 | if (!m_buffered.length()) |
| 1617 | return; |
| 1618 | // If at least 3 seconds of data are buffered ahead of the current time, assume playback can continue without interruption.
| 1619 | constexpr double kHaveEnoughDataThreshold = 3; |
| 1620 | auto currentTime = this->currentTime(); |
| 1621 | MediaTime aheadTime = std::min(duration(), currentTime + MediaTime::createWithDouble(kHaveEnoughDataThreshold)); |
| 1622 | PlatformTimeRanges neededBufferedRange { currentTime, std::max(currentTime, aheadTime) }; |
| 1623 | setReadyState(m_buffered.containWithEpsilon(neededBufferedRange, MediaTime(2002, 24000)) ? MediaPlayer::ReadyState::HaveEnoughData : MediaPlayer::ReadyState::HaveFutureData); |
| 1624 | } |
| 1625 | |
| 1626 | } // namespace WebCore |
| 1627 | |
| 1628 | #endif // ENABLE(COCOA_WEBM_PLAYER) |