Diffstat (limited to 'mozilla-1619882-2.patch')
-rw-r--r--  mozilla-1619882-2.patch  190
1 file changed, 0 insertions, 190 deletions
diff --git a/mozilla-1619882-2.patch b/mozilla-1619882-2.patch
deleted file mode 100644
index dc3ab77..0000000
--- a/mozilla-1619882-2.patch
+++ /dev/null
@@ -1,190 +0,0 @@
-diff --git a/dom/media/platforms/ffmpeg/FFmpegVideoDecoder.h b/dom/media/platforms/ffmpeg/FFmpegVideoDecoder.h
---- a/dom/media/platforms/ffmpeg/FFmpegVideoDecoder.h
-+++ b/dom/media/platforms/ffmpeg/FFmpegVideoDecoder.h
-@@ -10,21 +10,45 @@
- #include "FFmpegLibWrapper.h"
- #include "FFmpegDataDecoder.h"
- #include "SimpleMap.h"
-+#ifdef MOZ_WAYLAND_USE_VAAPI
-+# include "mozilla/widget/WaylandDMABufSurface.h"
-+# include <list>
-+#endif
-
- namespace mozilla {
-
- #ifdef MOZ_WAYLAND_USE_VAAPI
--class VAAPIFrameHolder {
-+// When VA-API decoding is running, ffmpeg allocates an AVHWFramesContext - a
-+// pool of "hardware" frames. Every "hardware" frame (VASurface) is backed by
-+// an actual piece of GPU memory which holds the decoded image data.
-+//
-+// The VASurface is wrapped by WaylandDMABufSurface and transferred to the
-+// rendering queue by WaylandDMABUFSurfaceImage, where a TextureClient is
-+// created and the VASurface is used as a texture.
-+//
-+// As there's a limited number of VASurfaces, ffmpeg reuses them to decode
-+// the next frames ASAP even if they are still attached to a
-+// WaylandDMABufSurface and used as a texture in our rendering engine.
-+//
-+// Unfortunately there isn't any obvious way to mark a particular VASurface as
-+// used. The best we can do is to hold a reference to the particular AVBuffer
-+// from the decoded AVFrame and to the AVHWFramesContext which owns it.
-+
-+class VAAPIFrameHolder final {
- public:
-- VAAPIFrameHolder(FFmpegLibWrapper* aLib, AVBufferRef* aVAAPIDeviceContext,
-- AVBufferRef* aAVHWFramesContext, AVBufferRef* aHWFrame);
-+ VAAPIFrameHolder(FFmpegLibWrapper* aLib, WaylandDMABufSurface* aSurface,
-+ AVCodecContext* aAVCodecContext, AVFrame* aAVFrame);
- ~VAAPIFrameHolder();
-
-+ // Check if WaylandDMABufSurface is used by any gecko rendering process
-+ // (WebRender or GL compositor) or by WaylandDMABUFSurfaceImage/VideoData.
-+ bool IsUsed() const { return mSurface->IsGlobalRefSet(); }
-+
- private:
-- FFmpegLibWrapper* mLib;
-- AVBufferRef* mVAAPIDeviceContext;
-+ const FFmpegLibWrapper* mLib;
-+ const RefPtr<WaylandDMABufSurface> mSurface;
- AVBufferRef* mAVHWFramesContext;
-- AVBufferRef* mHWFrame;
-+ AVBufferRef* mHWAVBuffer;
- };
- #endif
-
-@@ -97,6 +121,8 @@
-
- MediaResult CreateImageVAAPI(int64_t aOffset, int64_t aPts, int64_t aDuration,
- MediaDataDecoder::DecodedData& aResults);
-+ void ReleaseUnusedVAAPIFrames();
-+ void ReleaseAllVAAPIFrames();
- #endif
-
- /**
-@@ -112,6 +138,7 @@
- AVBufferRef* mVAAPIDeviceContext;
- const bool mDisableHardwareDecoding;
- VADisplay mDisplay;
-+ std::list<UniquePtr<VAAPIFrameHolder>> mFrameHolders;
- #endif
- RefPtr<KnowsCompositor> mImageAllocator;
- RefPtr<ImageContainer> mImageContainer;
-diff --git a/dom/media/platforms/ffmpeg/FFmpegVideoDecoder.cpp b/dom/media/platforms/ffmpeg/FFmpegVideoDecoder.cpp
---- a/dom/media/platforms/ffmpeg/FFmpegVideoDecoder.cpp
-+++ b/dom/media/platforms/ffmpeg/FFmpegVideoDecoder.cpp
-@@ -123,18 +123,27 @@
- }
-
- VAAPIFrameHolder::VAAPIFrameHolder(FFmpegLibWrapper* aLib,
-- AVBufferRef* aVAAPIDeviceContext,
-- AVBufferRef* aAVHWFramesContext,
-- AVBufferRef* aHWFrame)
-+ WaylandDMABufSurface* aSurface,
-+ AVCodecContext* aAVCodecContext,
-+ AVFrame* aAVFrame)
- : mLib(aLib),
-- mVAAPIDeviceContext(mLib->av_buffer_ref(aVAAPIDeviceContext)),
-- mAVHWFramesContext(mLib->av_buffer_ref(aAVHWFramesContext)),
-- mHWFrame(mLib->av_buffer_ref(aHWFrame)){};
-+ mSurface(aSurface),
-+ mAVHWFramesContext(mLib->av_buffer_ref(aAVCodecContext->hw_frames_ctx)),
-+ mHWAVBuffer(mLib->av_buffer_ref(aAVFrame->buf[0])) {
-+ FFMPEG_LOG("VAAPIFrameHolder is adding dmabuf surface UID = %d\n",
-+ mSurface->GetUID());
-+
-+  // Create a global refcount object to track mSurface usage across the
-+  // gecko rendering engine. We can't release the surface while it's still
-+  // used by the GL compositor / WebRender.
-+ mSurface->GlobalRefCountCreate();
-+}
-
- VAAPIFrameHolder::~VAAPIFrameHolder() {
-- mLib->av_buffer_unref(&mHWFrame);
-+ FFMPEG_LOG("VAAPIFrameHolder is releasing dmabuf surface UID = %d\n",
-+ mSurface->GetUID());
-+ mLib->av_buffer_unref(&mHWAVBuffer);
- mLib->av_buffer_unref(&mAVHWFramesContext);
-- mLib->av_buffer_unref(&mVAAPIDeviceContext);
- }
-
- AVCodec* FFmpegVideoDecoder<LIBAV_VER>::FindVAAPICodec() {
-@@ -422,6 +431,13 @@
- NS_WARNING("FFmpeg h264 decoder failed to allocate frame.");
- return MediaResult(NS_ERROR_OUT_OF_MEMORY, __func__);
- }
-+
-+# ifdef MOZ_WAYLAND_USE_VAAPI
-+ if (mVAAPIDeviceContext) {
-+ ReleaseUnusedVAAPIFrames();
-+ }
-+# endif
-+
- res = mLib->avcodec_receive_frame(mCodecContext, mFrame);
- if (res == int(AVERROR_EOF)) {
- return NS_ERROR_DOM_MEDIA_END_OF_STREAM;
-@@ -628,9 +644,20 @@
- }
-
- #ifdef MOZ_WAYLAND_USE_VAAPI
--static void VAAPIFrameReleaseCallback(VAAPIFrameHolder* aVAAPIFrameHolder) {
-- auto frameHolder = static_cast<VAAPIFrameHolder*>(aVAAPIFrameHolder);
-- delete frameHolder;
-+void FFmpegVideoDecoder<LIBAV_VER>::ReleaseUnusedVAAPIFrames() {
-+ std::list<UniquePtr<VAAPIFrameHolder>>::iterator holder =
-+ mFrameHolders.begin();
-+ while (holder != mFrameHolders.end()) {
-+ if (!(*holder)->IsUsed()) {
-+ holder = mFrameHolders.erase(holder);
-+ } else {
-+ holder++;
-+ }
-+ }
-+}
-+
-+void FFmpegVideoDecoder<LIBAV_VER>::ReleaseAllVAAPIFrames() {
-+ mFrameHolders.clear();
- }
-
- MediaResult FFmpegVideoDecoder<LIBAV_VER>::CreateImageVAAPI(
-@@ -667,20 +694,20 @@
- RESULT_DETAIL("Unable to allocate WaylandDMABufSurfaceNV12."));
- }
-
-+# ifdef MOZ_LOGGING
-+ static int uid = 0;
-+ surface->SetUID(++uid);
-+ FFMPEG_LOG("Created dmabuf UID = %d HW surface %x\n", uid, surface_id);
-+# endif
-+
- surface->SetYUVColorSpace(GetFrameColorSpace());
-
-- // mFrame->buf[0] is a reference to H264 VASurface for this mFrame.
-- // We need create WaylandDMABUFSurfaceImage on top of it,
-- // create EGLImage/Texture on top of it and render it by GL.
-+  // Store a reference to the decoded HW buffer; see the VAAPIFrameHolder class.
-+ auto holder =
-+ MakeUnique<VAAPIFrameHolder>(mLib, surface, mCodecContext, mFrame);
-+ mFrameHolders.push_back(std::move(holder));
-
-- // FFmpeg tends to reuse the particual VASurface for another frame
-- // even when the mFrame is not released. To keep VASurface as is
-- // we explicitly reference it and keep until WaylandDMABUFSurfaceImage
-- // is live.
-- RefPtr<layers::Image> im = new layers::WaylandDMABUFSurfaceImage(
-- surface, VAAPIFrameReleaseCallback,
-- new VAAPIFrameHolder(mLib, mVAAPIDeviceContext,
-- mCodecContext->hw_frames_ctx, mFrame->buf[0]));
-+ RefPtr<layers::Image> im = new layers::WaylandDMABUFSurfaceImage(surface);
-
- RefPtr<VideoData> vp = VideoData::CreateFromImage(
- mInfo.mDisplay, aOffset, TimeUnit::FromMicroseconds(aPts),
-@@ -732,6 +759,7 @@
- void FFmpegVideoDecoder<LIBAV_VER>::ProcessShutdown() {
- #ifdef MOZ_WAYLAND_USE_VAAPI
- if (mVAAPIDeviceContext) {
-+ ReleaseAllVAAPIFrames();
- mLib->av_buffer_unref(&mVAAPIDeviceContext);
- }
- #endif
-
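For context on the lifetime trick the patch comments describe, here is a minimal standalone sketch of the same pattern written against the plain FFmpeg API. It is an illustration, not the code the patch adds: the Mozilla-specific pieces (FFmpegLibWrapper, WaylandDMABufSurface::IsGlobalRefSet()) are replaced by a caller-supplied "is the surface still displayed?" predicate, and the names FrameHolder / ReleaseUnusedFrames are invented for the example. Only av_buffer_ref() / av_buffer_unref() and the AVCodecContext / AVFrame fields are real FFmpeg API.

// Sketch of the frame-holder pattern: keep the VASurface's backing buffers
// referenced while the compositor still displays the frame, release them
// afterwards so ffmpeg can recycle the surface.
extern "C" {
#include <libavcodec/avcodec.h>
#include <libavutil/buffer.h>
}
#include <functional>
#include <list>
#include <memory>
#include <utility>

class FrameHolder {
 public:
  // Hold extra references to the frame's backing AVBufferRef and to the
  // AVHWFramesContext that owns it, so ffmpeg cannot reuse the VASurface yet.
  FrameHolder(AVCodecContext* aCtx, AVFrame* aFrame,
              std::function<bool()> aIsDisplayed)
      : mHWFramesCtx(av_buffer_ref(aCtx->hw_frames_ctx)),
        mHWBuffer(av_buffer_ref(aFrame->buf[0])),
        mIsDisplayed(std::move(aIsDisplayed)) {}

  ~FrameHolder() {
    // Dropping the references lets ffmpeg recycle the VASurface.
    av_buffer_unref(&mHWBuffer);
    av_buffer_unref(&mHWFramesCtx);
  }

  bool IsUsed() const { return mIsDisplayed(); }

 private:
  AVBufferRef* mHWFramesCtx;
  AVBufferRef* mHWBuffer;
  std::function<bool()> mIsDisplayed;  // stand-in for IsGlobalRefSet()
};

// Run before each avcodec_receive_frame(): drop holders whose surface the
// compositor has finished with, so the decoder can reuse those VASurfaces.
void ReleaseUnusedFrames(std::list<std::unique_ptr<FrameHolder>>& aHolders) {
  for (auto it = aHolders.begin(); it != aHolders.end();) {
    if (!(*it)->IsUsed()) {
      it = aHolders.erase(it);  // destructor unrefs the AVBuffers
    } else {
      ++it;
    }
  }
}

In the patch itself, the equivalent cleanup is wired into the decode loop right before avcodec_receive_frame() (ReleaseUnusedVAAPIFrames) and into ProcessShutdown() (ReleaseAllVAAPIFrames), as the hunks above show.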