summaryrefslogtreecommitdiff
path: root/mozilla-1619882-2.patch
blob: dc3ab7726ed6b859d394358015233d096b3686fc (plain)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
diff --git a/dom/media/platforms/ffmpeg/FFmpegVideoDecoder.h b/dom/media/platforms/ffmpeg/FFmpegVideoDecoder.h
--- a/dom/media/platforms/ffmpeg/FFmpegVideoDecoder.h
+++ b/dom/media/platforms/ffmpeg/FFmpegVideoDecoder.h
@@ -10,21 +10,45 @@
 #include "FFmpegLibWrapper.h"
 #include "FFmpegDataDecoder.h"
 #include "SimpleMap.h"
+#ifdef MOZ_WAYLAND_USE_VAAPI
+#  include "mozilla/widget/WaylandDMABufSurface.h"
+#  include <list>
+#endif
 
 namespace mozilla {
 
 #ifdef MOZ_WAYLAND_USE_VAAPI
-class VAAPIFrameHolder {
+// When VA-API decoding is running, ffmpeg allocates AVHWFramesContext - a pool
+// of "hardware" frames. Every "hardware" frame (VASurface) is backed
+// by actual piece of GPU memory which holds the decoded image data.
+//
+// The VASurface is wrapped by WaylandDMABufSurface and transferred to
+// rendering queue by WaylandDMABUFSurfaceImage, where TextureClient is
+// created and VASurface is used as a texture there.
+//
+// As there's a limited number of VASurfaces, ffmpeg reuses them to decode
+// next frames ASAP even if they are still attached to WaylandDMABufSurface
+// and used as a texture in our rendering engine.
+//
+// Unfortunately there isn't any obvious way to mark a particular VASurface
+// as used. The best we can do is to hold a reference to the particular
+// AVBuffer from the decoded AVFrame and the AVHWFramesContext which owns it.
+
+class VAAPIFrameHolder final {
  public:
-  VAAPIFrameHolder(FFmpegLibWrapper* aLib, AVBufferRef* aVAAPIDeviceContext,
-                   AVBufferRef* aAVHWFramesContext, AVBufferRef* aHWFrame);
+  VAAPIFrameHolder(FFmpegLibWrapper* aLib, WaylandDMABufSurface* aSurface,
+                   AVCodecContext* aAVCodecContext, AVFrame* aAVFrame);
   ~VAAPIFrameHolder();
 
+  // Check if WaylandDMABufSurface is used by any gecko rendering process
+  // (WebRender or GL compositor) or by WaylandDMABUFSurfaceImage/VideoData.
+  bool IsUsed() const { return mSurface->IsGlobalRefSet(); }
+
  private:
-  FFmpegLibWrapper* mLib;
-  AVBufferRef* mVAAPIDeviceContext;
+  const FFmpegLibWrapper* mLib;
+  const RefPtr<WaylandDMABufSurface> mSurface;
   AVBufferRef* mAVHWFramesContext;
-  AVBufferRef* mHWFrame;
+  AVBufferRef* mHWAVBuffer;
 };
 #endif
 
@@ -97,6 +121,8 @@
 
   MediaResult CreateImageVAAPI(int64_t aOffset, int64_t aPts, int64_t aDuration,
                                MediaDataDecoder::DecodedData& aResults);
+  void ReleaseUnusedVAAPIFrames();
+  void ReleaseAllVAAPIFrames();
 #endif
 
   /**
@@ -112,6 +138,7 @@
   AVBufferRef* mVAAPIDeviceContext;
   const bool mDisableHardwareDecoding;
   VADisplay mDisplay;
+  std::list<UniquePtr<VAAPIFrameHolder>> mFrameHolders;
 #endif
   RefPtr<KnowsCompositor> mImageAllocator;
   RefPtr<ImageContainer> mImageContainer;
diff --git a/dom/media/platforms/ffmpeg/FFmpegVideoDecoder.cpp b/dom/media/platforms/ffmpeg/FFmpegVideoDecoder.cpp
--- a/dom/media/platforms/ffmpeg/FFmpegVideoDecoder.cpp
+++ b/dom/media/platforms/ffmpeg/FFmpegVideoDecoder.cpp
@@ -123,18 +123,27 @@
 }
 
 VAAPIFrameHolder::VAAPIFrameHolder(FFmpegLibWrapper* aLib,
-                                   AVBufferRef* aVAAPIDeviceContext,
-                                   AVBufferRef* aAVHWFramesContext,
-                                   AVBufferRef* aHWFrame)
+                                   WaylandDMABufSurface* aSurface,
+                                   AVCodecContext* aAVCodecContext,
+                                   AVFrame* aAVFrame)
     : mLib(aLib),
-      mVAAPIDeviceContext(mLib->av_buffer_ref(aVAAPIDeviceContext)),
-      mAVHWFramesContext(mLib->av_buffer_ref(aAVHWFramesContext)),
-      mHWFrame(mLib->av_buffer_ref(aHWFrame)){};
+      mSurface(aSurface),
+      mAVHWFramesContext(mLib->av_buffer_ref(aAVCodecContext->hw_frames_ctx)),
+      mHWAVBuffer(mLib->av_buffer_ref(aAVFrame->buf[0])) {
+  FFMPEG_LOG("VAAPIFrameHolder is adding dmabuf surface UID = %d\n",
+             mSurface->GetUID());
+
+  // Create a global refcount object to track mSurface usage across
+  // gecko's rendering engine. We can't release it while it's still used
+  // by the GL compositor / WebRender.
+  mSurface->GlobalRefCountCreate();
+}
 
 VAAPIFrameHolder::~VAAPIFrameHolder() {
-  mLib->av_buffer_unref(&mHWFrame);
+  FFMPEG_LOG("VAAPIFrameHolder is releasing dmabuf surface UID = %d\n",
+             mSurface->GetUID());
+  mLib->av_buffer_unref(&mHWAVBuffer);
   mLib->av_buffer_unref(&mAVHWFramesContext);
-  mLib->av_buffer_unref(&mVAAPIDeviceContext);
 }
 
 AVCodec* FFmpegVideoDecoder<LIBAV_VER>::FindVAAPICodec() {
@@ -422,6 +431,13 @@
       NS_WARNING("FFmpeg h264 decoder failed to allocate frame.");
       return MediaResult(NS_ERROR_OUT_OF_MEMORY, __func__);
     }
+
+#  ifdef MOZ_WAYLAND_USE_VAAPI
+    if (mVAAPIDeviceContext) {
+      ReleaseUnusedVAAPIFrames();
+    }
+#  endif
+
     res = mLib->avcodec_receive_frame(mCodecContext, mFrame);
     if (res == int(AVERROR_EOF)) {
       return NS_ERROR_DOM_MEDIA_END_OF_STREAM;
@@ -628,9 +644,20 @@
 }
 
 #ifdef MOZ_WAYLAND_USE_VAAPI
-static void VAAPIFrameReleaseCallback(VAAPIFrameHolder* aVAAPIFrameHolder) {
-  auto frameHolder = static_cast<VAAPIFrameHolder*>(aVAAPIFrameHolder);
-  delete frameHolder;
+void FFmpegVideoDecoder<LIBAV_VER>::ReleaseUnusedVAAPIFrames() {
+  std::list<UniquePtr<VAAPIFrameHolder>>::iterator holder =
+      mFrameHolders.begin();
+  while (holder != mFrameHolders.end()) {
+    if (!(*holder)->IsUsed()) {
+      holder = mFrameHolders.erase(holder);
+    } else {
+      holder++;
+    }
+  }
+}
+
+void FFmpegVideoDecoder<LIBAV_VER>::ReleaseAllVAAPIFrames() {
+  mFrameHolders.clear();
 }
 
 MediaResult FFmpegVideoDecoder<LIBAV_VER>::CreateImageVAAPI(
@@ -667,20 +694,20 @@
         RESULT_DETAIL("Unable to allocate WaylandDMABufSurfaceNV12."));
   }
 
+#  ifdef MOZ_LOGGING
+  static int uid = 0;
+  surface->SetUID(++uid);
+  FFMPEG_LOG("Created dmabuf UID = %d HW surface %x\n", uid, surface_id);
+#  endif
+
   surface->SetYUVColorSpace(GetFrameColorSpace());
 
-  // mFrame->buf[0] is a reference to H264 VASurface for this mFrame.
-  // We need create WaylandDMABUFSurfaceImage on top of it,
-  // create EGLImage/Texture on top of it and render it by GL.
+  // Store reference to the decoded HW buffer, see VAAPIFrameHolder struct.
+  auto holder =
+      MakeUnique<VAAPIFrameHolder>(mLib, surface, mCodecContext, mFrame);
+  mFrameHolders.push_back(std::move(holder));
 
-  // FFmpeg tends to reuse the particual VASurface for another frame
-  // even when the mFrame is not released. To keep VASurface as is
-  // we explicitly reference it and keep until WaylandDMABUFSurfaceImage
-  // is live.
-  RefPtr<layers::Image> im = new layers::WaylandDMABUFSurfaceImage(
-      surface, VAAPIFrameReleaseCallback,
-      new VAAPIFrameHolder(mLib, mVAAPIDeviceContext,
-                           mCodecContext->hw_frames_ctx, mFrame->buf[0]));
+  RefPtr<layers::Image> im = new layers::WaylandDMABUFSurfaceImage(surface);
 
   RefPtr<VideoData> vp = VideoData::CreateFromImage(
       mInfo.mDisplay, aOffset, TimeUnit::FromMicroseconds(aPts),
@@ -732,6 +759,7 @@
 void FFmpegVideoDecoder<LIBAV_VER>::ProcessShutdown() {
 #ifdef MOZ_WAYLAND_USE_VAAPI
   if (mVAAPIDeviceContext) {
+    ReleaseAllVAAPIFrames();
     mLib->av_buffer_unref(&mVAAPIDeviceContext);
   }
 #endif

bgstack15