diff --git a/dom/media/platforms/ffmpeg/FFmpegVideoDecoder.h b/dom/media/platforms/ffmpeg/FFmpegVideoDecoder.h
--- a/dom/media/platforms/ffmpeg/FFmpegVideoDecoder.h
+++ b/dom/media/platforms/ffmpeg/FFmpegVideoDecoder.h
@@ -10,17 +10,26 @@
#include "FFmpegLibWrapper.h"
#include "FFmpegDataDecoder.h"
#include "SimpleMap.h"
+#ifdef MOZ_WAYLAND_USE_VAAPI
+# include "mozilla/widget/WaylandDMABufSurface.h"
+# include <list>
+#endif
namespace mozilla {
#ifdef MOZ_WAYLAND_USE_VAAPI
+
class VAAPIFrameHolder {
public:
- VAAPIFrameHolder(FFmpegLibWrapper* aLib, AVBufferRef* aVAAPIDeviceContext,
+ VAAPIFrameHolder(RefPtr<WaylandDMABufSurface> aSurface,
+ FFmpegLibWrapper* aLib, AVBufferRef* aVAAPIDeviceContext,
AVBufferRef* aAVHWFramesContext, AVBufferRef* aHWFrame);
~VAAPIFrameHolder();
+ bool IsUsed() { return mSurface->IsGlobalRefSet(); }
+
private:
+ RefPtr<WaylandDMABufSurface> mSurface;
FFmpegLibWrapper* mLib;
AVBufferRef* mVAAPIDeviceContext;
AVBufferRef* mAVHWFramesContext;
@@ -97,6 +106,7 @@
MediaResult CreateImageVAAPI(int64_t aOffset, int64_t aPts, int64_t aDuration,
MediaDataDecoder::DecodedData& aResults);
+ void ReleaseUnusedVAAPIFrames();
#endif
/**
@@ -112,6 +122,7 @@
AVBufferRef* mVAAPIDeviceContext;
const bool mDisableHardwareDecoding;
VADisplay mDisplay;
+ std::list<UniquePtr<VAAPIFrameHolder>> mFrameHolders;
#endif
RefPtr<KnowsCompositor> mImageAllocator;
RefPtr<ImageContainer> mImageContainer;
diff --git a/dom/media/platforms/ffmpeg/FFmpegVideoDecoder.cpp b/dom/media/platforms/ffmpeg/FFmpegVideoDecoder.cpp
--- a/dom/media/platforms/ffmpeg/FFmpegVideoDecoder.cpp
+++ b/dom/media/platforms/ffmpeg/FFmpegVideoDecoder.cpp
@@ -122,19 +122,30 @@
return AV_PIX_FMT_NONE;
}
-VAAPIFrameHolder::VAAPIFrameHolder(FFmpegLibWrapper* aLib,
+VAAPIFrameHolder::VAAPIFrameHolder(RefPtr<WaylandDMABufSurface> aSurface,
+ FFmpegLibWrapper* aLib,
AVBufferRef* aVAAPIDeviceContext,
AVBufferRef* aAVHWFramesContext,
AVBufferRef* aHWFrame)
- : mLib(aLib),
+ : mSurface(aSurface),
+ mLib(aLib),
mVAAPIDeviceContext(mLib->av_buffer_ref(aVAAPIDeviceContext)),
mAVHWFramesContext(mLib->av_buffer_ref(aAVHWFramesContext)),
- mHWFrame(mLib->av_buffer_ref(aHWFrame)){};
+ mHWFrame(mLib->av_buffer_ref(aHWFrame)) {
+ FFMPEG_LOG("VAAPIFrameHolder is adding dmabuf surface UID = %d\n",
+ mSurface->GetUID());
+ // Create a global refcount object to track mSurface usage across
+ // processes.
+ mSurface->GlobalRefCountCreate();
+}
VAAPIFrameHolder::~VAAPIFrameHolder() {
+ FFMPEG_LOG("VAAPIFrameHolder is releasing dmabuf surface UID = %d\n",
+ mSurface->GetUID());
mLib->av_buffer_unref(&mHWFrame);
mLib->av_buffer_unref(&mAVHWFramesContext);
mLib->av_buffer_unref(&mVAAPIDeviceContext);
+ mSurface = nullptr;
}
AVCodec* FFmpegVideoDecoder<LIBAV_VER>::FindVAAPICodec() {
@@ -418,6 +428,13 @@
NS_WARNING("FFmpeg h264 decoder failed to allocate frame.");
return MediaResult(NS_ERROR_OUT_OF_MEMORY, __func__);
}
+
+# ifdef MOZ_WAYLAND_USE_VAAPI
+ if (mVAAPIDeviceContext) {
+ ReleaseUnusedVAAPIFrames();
+ }
+# endif
+
res = mLib->avcodec_receive_frame(mCodecContext, mFrame);
if (res == int(AVERROR_EOF)) {
return NS_ERROR_DOM_MEDIA_END_OF_STREAM;
@@ -624,9 +641,16 @@
}
#ifdef MOZ_WAYLAND_USE_VAAPI
-static void VAAPIFrameReleaseCallback(VAAPIFrameHolder* aVAAPIFrameHolder) {
- auto frameHolder = static_cast<VAAPIFrameHolder*>(aVAAPIFrameHolder);
- delete frameHolder;
+void FFmpegVideoDecoder<LIBAV_VER>::ReleaseUnusedVAAPIFrames() {
+ std::list<UniquePtr<VAAPIFrameHolder>>::iterator holder =
+ mFrameHolders.begin();
+ while (holder != mFrameHolders.end()) {
+ if (!(*holder)->IsUsed()) {
+ holder = mFrameHolders.erase(holder);
+ } else {
+ holder++;
+ }
+ }
}
MediaResult FFmpegVideoDecoder<LIBAV_VER>::CreateImageVAAPI(
@@ -663,20 +687,28 @@
RESULT_DETAIL("Unable to allocate WaylandDMABufSurfaceNV12."));
}
+# ifdef MOZ_LOGGING
+ static int uid = 0;
+ surface->SetUID(++uid);
+ FFMPEG_LOG("Created dmabuf UID = %d HW surface %x\n", uid, surface_id);
+# endif
+
surface->SetYUVColorSpace(GetFrameColorSpace());
// mFrame->buf[0] is a reference to H264 VASurface for this mFrame.
- // We need create WaylandDMABUFSurfaceImage on top of it,
+ // We need to create a WaylandDMABUFSurface on top of it,
// create EGLImage/Texture on top of it and render it by GL.
// FFmpeg tends to reuse the particular VASurface for another frame
// even when the mFrame is not released. To keep VASurface as is
- // we explicitly reference it and keep until WaylandDMABUFSurfaceImage
- // is live.
- RefPtr<layers::Image> im = new layers::WaylandDMABUFSurfaceImage(
- surface, VAAPIFrameReleaseCallback,
- new VAAPIFrameHolder(mLib, mVAAPIDeviceContext,
- mCodecContext->hw_frames_ctx, mFrame->buf[0]));
+ // we explicitly reference it and keep it as long as any reference to the
+ // attached WaylandDMABUFSurface exists.
+ auto holder = MakeUnique<VAAPIFrameHolder>(surface, mLib, mVAAPIDeviceContext,
+ mCodecContext->hw_frames_ctx,
+ mFrame->buf[0]);
+ mFrameHolders.push_back(std::move(holder));
+
+ RefPtr<layers::Image> im = new layers::WaylandDMABUFSurfaceImage(surface);
RefPtr<VideoData> vp = VideoData::CreateFromImage(
mInfo.mDisplay, aOffset, TimeUnit::FromMicroseconds(aPts),
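
For readers who want the shape of the change outside the patch context, below is a minimal, standalone C++ sketch of the lifetime pattern the patch introduces, assuming hypothetical stand-in types (Surface, HWFrame, FrameHolder, Decoder) in place of the real WaylandDMABufSurface, the AVBufferRef chain, VAAPIFrameHolder and FFmpegVideoDecoder. Each decoded frame gets a holder that pins its resources, and the holder list is pruned before the next frame is received, once no process holds a global reference to the surface any more.

#include <atomic>
#include <cstdio>
#include <list>
#include <memory>

// Stand-in for WaylandDMABufSurface: only the cross-process refcount matters here.
struct Surface {
  int mUID = 0;
  std::atomic<int> mGlobalRefs{0};
  bool IsGlobalRefSet() const { return mGlobalRefs.load() > 0; }
};

// Stand-in for the AVBufferRef-managed VA-API frame the holder pins.
struct HWFrame {
  explicit HWFrame(int aId) : mId(aId) {}
  ~HWFrame() { std::printf("releasing HW frame %d\n", mId); }
  int mId;
};

// Mirrors VAAPIFrameHolder: keeps the surface and the HW frame alive together.
class FrameHolder {
 public:
  FrameHolder(std::shared_ptr<Surface> aSurface, std::unique_ptr<HWFrame> aFrame)
      : mSurface(std::move(aSurface)), mFrame(std::move(aFrame)) {}
  bool IsUsed() const { return mSurface->IsGlobalRefSet(); }

 private:
  std::shared_ptr<Surface> mSurface;
  std::unique_ptr<HWFrame> mFrame;
};

class Decoder {
 public:
  void AddFrame(std::shared_ptr<Surface> aSurface, std::unique_ptr<HWFrame> aFrame) {
    mHolders.push_back(
        std::make_unique<FrameHolder>(std::move(aSurface), std::move(aFrame)));
  }

  // Same loop as ReleaseUnusedVAAPIFrames(): called before receiving the next
  // frame, it drops every holder whose surface no other process references.
  void ReleaseUnusedFrames() {
    auto holder = mHolders.begin();
    while (holder != mHolders.end()) {
      if (!(*holder)->IsUsed()) {
        holder = mHolders.erase(holder);
      } else {
        ++holder;
      }
    }
  }

 private:
  std::list<std::unique_ptr<FrameHolder>> mHolders;
};

int main() {
  Decoder decoder;

  auto s1 = std::make_shared<Surface>();
  s1->mUID = 1;
  auto s2 = std::make_shared<Surface>();
  s2->mUID = 2;
  s2->mGlobalRefs = 1;  // pretend the compositor still maps surface 2

  decoder.AddFrame(s1, std::make_unique<HWFrame>(1));
  decoder.AddFrame(s2, std::make_unique<HWFrame>(2));

  // Only "releasing HW frame 1" is printed; surface 2 is still referenced.
  decoder.ReleaseUnusedFrames();
  return 0;
}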