path: root/mozilla-1619882-2.patch
author     Martin Stransky <stransky@redhat.com>  2020-05-29 11:31:41 +0200
committer  Martin Stransky <stransky@redhat.com>  2020-05-29 11:31:41 +0200
commit     5116ef14ae33365e43aa45375df466917fbda054 (patch)
tree       8ca517d917d299d216e15e20fb965748960de7d8 /mozilla-1619882-2.patch
parent     Added fix for mozbz#1632456 (diff)
download   librewolf-fedora-ff-5116ef14ae33365e43aa45375df466917fbda054.tar.gz
           librewolf-fedora-ff-5116ef14ae33365e43aa45375df466917fbda054.tar.bz2
           librewolf-fedora-ff-5116ef14ae33365e43aa45375df466917fbda054.zip
Updated to 77.0
Diffstat (limited to 'mozilla-1619882-2.patch')
-rw-r--r--  mozilla-1619882-2.patch  123
1 files changed, 79 insertions, 44 deletions
diff --git a/mozilla-1619882-2.patch b/mozilla-1619882-2.patch
index 2733186..dc3ab77 100644
--- a/mozilla-1619882-2.patch
+++ b/mozilla-1619882-2.patch
@@ -1,7 +1,7 @@
diff --git a/dom/media/platforms/ffmpeg/FFmpegVideoDecoder.h b/dom/media/platforms/ffmpeg/FFmpegVideoDecoder.h
--- a/dom/media/platforms/ffmpeg/FFmpegVideoDecoder.h
+++ b/dom/media/platforms/ffmpeg/FFmpegVideoDecoder.h
-@@ -10,17 +10,26 @@
+@@ -10,21 +10,45 @@
#include "FFmpegLibWrapper.h"
#include "FFmpegDataDecoder.h"
#include "SimpleMap.h"
@@ -13,31 +13,56 @@ diff --git a/dom/media/platforms/ffmpeg/FFmpegVideoDecoder.h b/dom/media/platfor
namespace mozilla {
#ifdef MOZ_WAYLAND_USE_VAAPI
+-class VAAPIFrameHolder {
++// When VA-API decoding is running, ffmpeg allocates AVHWFramesContext - a pool
++// of "hardware" frames. Every "hardware" frame (VASurface) is backed
++// by actual piece of GPU memory which holds the decoded image data.
++//
++// The VASurface is wrapped by WaylandDMABufSurface and transferred to
++// rendering queue by WaylandDMABUFSurfaceImage, where TextureClient is
++// created and VASurface is used as a texture there.
++//
++// As there's a limited number of VASurfaces, ffmpeg reuses them to decode
++// next frames ASAP even if they are still attached to WaylandDMABufSurface
++// and used as a texture in our rendering engine.
++//
++// Unfortunately there isn't any obvious way how to mark particular VASurface
++// as used. The best we can do is to hold a reference to particular AVBuffer
++// from decoded AVFrame and AVHWFramesContext which owns the AVBuffer.
+
- class VAAPIFrameHolder {
++class VAAPIFrameHolder final {
public:
- VAAPIFrameHolder(FFmpegLibWrapper* aLib, AVBufferRef* aVAAPIDeviceContext,
-+ VAAPIFrameHolder(RefPtr<WaylandDMABufSurface> aSurface,
-+ FFmpegLibWrapper* aLib, AVBufferRef* aVAAPIDeviceContext,
- AVBufferRef* aAVHWFramesContext, AVBufferRef* aHWFrame);
+- AVBufferRef* aAVHWFramesContext, AVBufferRef* aHWFrame);
++ VAAPIFrameHolder(FFmpegLibWrapper* aLib, WaylandDMABufSurface* aSurface,
++ AVCodecContext* aAVCodecContext, AVFrame* aAVFrame);
~VAAPIFrameHolder();
-+ bool IsUsed() { return mSurface->IsGlobalRefSet(); }
++ // Check if WaylandDMABufSurface is used by any gecko rendering process
++ // (WebRender or GL compositor) or by WaylandDMABUFSurfaceImage/VideoData.
++ bool IsUsed() const { return mSurface->IsGlobalRefSet(); }
+
private:
-+ RefPtr<WaylandDMABufSurface> mSurface;
- FFmpegLibWrapper* mLib;
- AVBufferRef* mVAAPIDeviceContext;
+- FFmpegLibWrapper* mLib;
+- AVBufferRef* mVAAPIDeviceContext;
++ const FFmpegLibWrapper* mLib;
++ const RefPtr<WaylandDMABufSurface> mSurface;
AVBufferRef* mAVHWFramesContext;
-@@ -97,6 +106,7 @@
+- AVBufferRef* mHWFrame;
++ AVBufferRef* mHWAVBuffer;
+ };
+ #endif
+
+@@ -97,6 +121,8 @@
MediaResult CreateImageVAAPI(int64_t aOffset, int64_t aPts, int64_t aDuration,
MediaDataDecoder::DecodedData& aResults);
+ void ReleaseUnusedVAAPIFrames();
++ void ReleaseAllVAAPIFrames();
#endif
/**
-@@ -112,6 +122,7 @@
+@@ -112,6 +138,7 @@
AVBufferRef* mVAAPIDeviceContext;
const bool mDisableHardwareDecoding;
VADisplay mDisplay;
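
The new header comment above spells out the constraint this patch works around: ffmpeg recycles VASurfaces from its AVHWFramesContext pool as soon as it can, so a decoded frame stays valid only while something holds a reference to its AVBuffer and to the pool that owns it. As a rough sketch of that reference-holding idea against the plain FFmpeg API only: FrameHolderSketch is a made-up name, and the real Firefox class routes every call through FFmpegLibWrapper (mLib->av_buffer_ref) instead of calling libav directly.

extern "C" {
#include <libavcodec/avcodec.h>
#include <libavutil/buffer.h>
#include <libavutil/frame.h>
}

// Not the Firefox class: a bare-bones holder that pins one decoded VA-API
// frame by referencing its AVBuffer and the AVHWFramesContext pool owning it.
class FrameHolderSketch final {
 public:
  FrameHolderSketch(AVCodecContext* aCodecContext, AVFrame* aFrame)
      : mHWFramesContext(av_buffer_ref(aCodecContext->hw_frames_ctx)),
        mHWBuffer(av_buffer_ref(aFrame->buf[0])) {}

  ~FrameHolderSketch() {
    // Dropping both references lets ffmpeg recycle the underlying VASurface.
    av_buffer_unref(&mHWBuffer);
    av_buffer_unref(&mHWFramesContext);
  }

 private:
  AVBufferRef* mHWFramesContext;  // keeps the hardware frame pool alive
  AVBufferRef* mHWBuffer;         // keeps this frame's VASurface alive
};
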
@@ -48,41 +73,43 @@ diff --git a/dom/media/platforms/ffmpeg/FFmpegVideoDecoder.h b/dom/media/platfor
diff --git a/dom/media/platforms/ffmpeg/FFmpegVideoDecoder.cpp b/dom/media/platforms/ffmpeg/FFmpegVideoDecoder.cpp
--- a/dom/media/platforms/ffmpeg/FFmpegVideoDecoder.cpp
+++ b/dom/media/platforms/ffmpeg/FFmpegVideoDecoder.cpp
-@@ -122,19 +122,30 @@
- return AV_PIX_FMT_NONE;
+@@ -123,18 +123,27 @@
}
--VAAPIFrameHolder::VAAPIFrameHolder(FFmpegLibWrapper* aLib,
-+VAAPIFrameHolder::VAAPIFrameHolder(RefPtr<WaylandDMABufSurface> aSurface,
-+ FFmpegLibWrapper* aLib,
- AVBufferRef* aVAAPIDeviceContext,
- AVBufferRef* aAVHWFramesContext,
- AVBufferRef* aHWFrame)
-- : mLib(aLib),
-+ : mSurface(aSurface),
-+ mLib(aLib),
- mVAAPIDeviceContext(mLib->av_buffer_ref(aVAAPIDeviceContext)),
- mAVHWFramesContext(mLib->av_buffer_ref(aAVHWFramesContext)),
+ VAAPIFrameHolder::VAAPIFrameHolder(FFmpegLibWrapper* aLib,
+- AVBufferRef* aVAAPIDeviceContext,
+- AVBufferRef* aAVHWFramesContext,
+- AVBufferRef* aHWFrame)
++ WaylandDMABufSurface* aSurface,
++ AVCodecContext* aAVCodecContext,
++ AVFrame* aAVFrame)
+ : mLib(aLib),
+- mVAAPIDeviceContext(mLib->av_buffer_ref(aVAAPIDeviceContext)),
+- mAVHWFramesContext(mLib->av_buffer_ref(aAVHWFramesContext)),
- mHWFrame(mLib->av_buffer_ref(aHWFrame)){};
-+ mHWFrame(mLib->av_buffer_ref(aHWFrame)) {
++ mSurface(aSurface),
++ mAVHWFramesContext(mLib->av_buffer_ref(aAVCodecContext->hw_frames_ctx)),
++ mHWAVBuffer(mLib->av_buffer_ref(aAVFrame->buf[0])) {
+ FFMPEG_LOG("VAAPIFrameHolder is adding dmabuf surface UID = %d\n",
+ mSurface->GetUID());
++
+ // Create global refcount object to track mSurface usage over
-+ // processes.
++  // gecko rendering engine. We can't release it until it's used
++ // by GL compositor / WebRender.
+ mSurface->GlobalRefCountCreate();
+}
VAAPIFrameHolder::~VAAPIFrameHolder() {
+- mLib->av_buffer_unref(&mHWFrame);
+ FFMPEG_LOG("VAAPIFrameHolder is releasing dmabuf surface UID = %d\n",
+ mSurface->GetUID());
- mLib->av_buffer_unref(&mHWFrame);
++ mLib->av_buffer_unref(&mHWAVBuffer);
mLib->av_buffer_unref(&mAVHWFramesContext);
- mLib->av_buffer_unref(&mVAAPIDeviceContext);
-+ mSurface = nullptr;
+- mLib->av_buffer_unref(&mVAAPIDeviceContext);
}
AVCodec* FFmpegVideoDecoder<LIBAV_VER>::FindVAAPICodec() {
-@@ -418,6 +428,13 @@
+@@ -422,6 +431,13 @@
NS_WARNING("FFmpeg h264 decoder failed to allocate frame.");
return MediaResult(NS_ERROR_OUT_OF_MEMORY, __func__);
}
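
In the hunk above the holder also creates a global reference count on the WaylandDMABufSurface (GlobalRefCountCreate) so that IsUsed() can report whether the compositor side still needs the surface. Those are Mozilla-internal helpers; purely as an analogy, the check has the shape of a shared-ownership count, sketched here with std::shared_ptr (SurfaceSketch and UsageSketch are illustrative names only).

#include <memory>

// Loose analogy only: the real WaylandDMABufSurface global refcount is a
// cross-process mechanism, not a shared_ptr, but IsUsed() asks the same
// question: does anything besides the holder still reference the surface?
struct SurfaceSketch { /* decoded image data */ };

class UsageSketch {
 public:
  explicit UsageSketch(std::shared_ptr<SurfaceSketch> aSurface)
      : mSurface(std::move(aSurface)) {}

  // "Used" means another owner (e.g. the compositor) still holds a reference.
  bool IsUsed() const { return mSurface.use_count() > 1; }

 private:
  std::shared_ptr<SurfaceSketch> mSurface;
};
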
@@ -96,7 +123,7 @@ diff --git a/dom/media/platforms/ffmpeg/FFmpegVideoDecoder.cpp b/dom/media/platf
res = mLib->avcodec_receive_frame(mCodecContext, mFrame);
if (res == int(AVERROR_EOF)) {
return NS_ERROR_DOM_MEDIA_END_OF_STREAM;
-@@ -624,9 +641,16 @@
+@@ -628,9 +644,20 @@
}
#ifdef MOZ_WAYLAND_USE_VAAPI
@@ -113,10 +140,14 @@ diff --git a/dom/media/platforms/ffmpeg/FFmpegVideoDecoder.cpp b/dom/media/platf
+ holder++;
+ }
+ }
++}
++
++void FFmpegVideoDecoder<LIBAV_VER>::ReleaseAllVAAPIFrames() {
++ mFrameHolders.clear();
}
MediaResult FFmpegVideoDecoder<LIBAV_VER>::CreateImageVAAPI(
-@@ -663,20 +687,28 @@
+@@ -667,20 +694,20 @@
RESULT_DETAIL("Unable to allocate WaylandDMABufSurfaceNV12."));
}
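
ReleaseUnusedVAAPIFrames() in the hunk above walks mFrameHolders and destroys every holder whose surface the rendering pipeline no longer references, while ReleaseAllVAAPIFrames() simply clears the list. Because the loop is only partially visible in the diff, here is the erase-while-iterating idiom it relies on, sketched with illustrative types (a std::list of std::unique_ptr holders exposing IsUsed(), as in the patch; not the exact Firefox members).

#include <list>
#include <memory>

// Illustrative only: Holder stands in for VAAPIFrameHolder with its IsUsed().
template <typename Holder>
void ReleaseUnusedSketch(std::list<std::unique_ptr<Holder>>& aHolders) {
  auto holder = aHolders.begin();
  while (holder != aHolders.end()) {
    if (!(*holder)->IsUsed()) {
      // erase() destroys the holder (releasing its AVBuffer references)
      // and returns an iterator to the next element.
      holder = aHolders.erase(holder);
    } else {
      ++holder;
    }
  }
}
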
@@ -128,28 +159,32 @@ diff --git a/dom/media/platforms/ffmpeg/FFmpegVideoDecoder.cpp b/dom/media/platf
+
surface->SetYUVColorSpace(GetFrameColorSpace());
- // mFrame->buf[0] is a reference to H264 VASurface for this mFrame.
+- // mFrame->buf[0] is a reference to H264 VASurface for this mFrame.
- // We need create WaylandDMABUFSurfaceImage on top of it,
-+ // We need create WaylandDMABUFSurface on top of it,
- // create EGLImage/Texture on top of it and render it by GL.
+- // create EGLImage/Texture on top of it and render it by GL.
++ // Store reference to the decoded HW buffer, see VAAPIFrameHolder struct.
++ auto holder =
++ MakeUnique<VAAPIFrameHolder>(mLib, surface, mCodecContext, mFrame);
++ mFrameHolders.push_back(std::move(holder));
- // FFmpeg tends to reuse the particual VASurface for another frame
- // even when the mFrame is not released. To keep VASurface as is
+- // FFmpeg tends to reuse the particual VASurface for another frame
+- // even when the mFrame is not released. To keep VASurface as is
- // we explicitly reference it and keep until WaylandDMABUFSurfaceImage
- // is live.
- RefPtr<layers::Image> im = new layers::WaylandDMABUFSurfaceImage(
- surface, VAAPIFrameReleaseCallback,
- new VAAPIFrameHolder(mLib, mVAAPIDeviceContext,
- mCodecContext->hw_frames_ctx, mFrame->buf[0]));
-+ // we explicitly reference it and keep until there's any reference to
-+ // attached WaylandDMABUFSurface.
-+ auto holder = MakeUnique<VAAPIFrameHolder>(surface, mLib, mVAAPIDeviceContext,
-+ mCodecContext->hw_frames_ctx,
-+ mFrame->buf[0]);
-+ mFrameHolders.push_back(std::move(holder));
-+
+ RefPtr<layers::Image> im = new layers::WaylandDMABUFSurfaceImage(surface);
RefPtr<VideoData> vp = VideoData::CreateFromImage(
mInfo.mDisplay, aOffset, TimeUnit::FromMicroseconds(aPts),
+@@ -732,6 +759,7 @@
+ void FFmpegVideoDecoder<LIBAV_VER>::ProcessShutdown() {
+ #ifdef MOZ_WAYLAND_USE_VAAPI
+ if (mVAAPIDeviceContext) {
++ ReleaseAllVAAPIFrames();
+ mLib->av_buffer_unref(&mVAAPIDeviceContext);
+ }
+ #endif
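
The last hunk also changes ProcessShutdown() to call ReleaseAllVAAPIFrames() before unreferencing the VA-API device context, so every held AVBuffer and frames-context reference is gone before the device they were allocated from. A minimal ordering sketch against the plain FFmpeg API (member and type names are hypothetical):

extern "C" {
#include <libavutil/buffer.h>
}
#include <list>
#include <memory>

// Minimal stand-in for VAAPIFrameHolder: owns one AVBuffer reference.
struct HeldBufferSketch {
  AVBufferRef* mBuf = nullptr;
  ~HeldBufferSketch() { av_buffer_unref(&mBuf); }
};

struct ShutdownSketch {
  std::list<std::unique_ptr<HeldBufferSketch>> mHolders;
  AVBufferRef* mDeviceContext = nullptr;

  void Shutdown() {
    // Drop every holder first so their buffer references are released
    // before the VA-API device context itself is unreferenced.
    mHolders.clear();
    av_buffer_unref(&mDeviceContext);
  }
};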