/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
/* vim:set ts=2 sw=2 sts=2 et cindent: */
/* This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */

#ifndef __FFmpegVideoDecoder_h__
#define __FFmpegVideoDecoder_h__

#include "FFmpegLibWrapper.h"
#include "FFmpegDataDecoder.h"
#include "mozilla/Pair.h"
#include "nsTArray.h"

namespace mozilla
{

template <int V>
class FFmpegVideoDecoder : public FFmpegDataDecoder<V>
{
};

template <>
class FFmpegVideoDecoder<LIBAV_VER> : public FFmpegDataDecoder<LIBAV_VER>
{
  typedef mozilla::layers::Image Image;
  typedef mozilla::layers::ImageContainer ImageContainer;

public:
  FFmpegVideoDecoder(FFmpegLibWrapper* aLib, TaskQueue* aTaskQueue,
                     MediaDataDecoderCallback* aCallback,
                     const VideoInfo& aConfig,
                     ImageContainer* aImageContainer);
  virtual ~FFmpegVideoDecoder();

  RefPtr<InitPromise> Init() override;
  void InitCodecContext() override;
  const char* GetDescriptionName() const override
  {
#ifdef USING_MOZFFVPX
    return "ffvpx video decoder";
#else
    return "ffmpeg video decoder";
#endif
  }
  static AVCodecID GetCodecId(const nsACString& aMimeType);
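
  // Note: the mapping performed by GetCodecId() is defined in the .cpp file.
  // As a hedged illustration of the shape such a MIME-type-to-codec mapping
  // usually takes (the literals below are assumptions, not copied from the
  // implementation):
  //
  //   if (aMimeType.EqualsLiteral("video/avc") ||
  //       aMimeType.EqualsLiteral("video/mp4")) {
  //     return AV_CODEC_ID_H264;
  //   }
  //   ...
  //   return AV_CODEC_ID_NONE;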

private:
  MediaResult DoDecode(MediaRawData* aSample) override;
  MediaResult DoDecode(MediaRawData* aSample, bool* aGotFrame);
  MediaResult DoDecode(MediaRawData* aSample, uint8_t* aData, int aSize,
                       bool* aGotFrame);
  void ProcessDrain() override;
  void ProcessFlush() override;
  void OutputDelayedFrames();

  /**
   * This method allocates a buffer for FFmpeg's decoder, wrapped in an Image.
   * Currently it only supports Planar YUV420, which appears to be the only
   * non-hardware accelerated image format that FFmpeg's H264 decoder is
   * capable of outputting.
   */
  int AllocateYUV420PVideoBuffer(AVCodecContext* aCodecContext,
                                 AVFrame* aFrame);
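
  // Illustrative sketch only (the actual definition lives in the .cpp file):
  // an allocator of this kind is typically installed as the codec context's
  // get_buffer callback and points the AVFrame's plane pointers at memory
  // owned by an Image, so FFmpeg decodes straight into a buffer the
  // compositor can consume. The plane/stride names below are hypothetical:
  //
  //   aFrame->data[0] = yPlane;   aFrame->linesize[0] = yStride;
  //   aFrame->data[1] = cbPlane;  aFrame->linesize[1] = cbCrStride;
  //   aFrame->data[2] = crPlane;  aFrame->linesize[2] = cbCrStride;
  //   return 0; // 0 signals to FFmpeg that the buffer was provided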

  RefPtr<ImageContainer> mImageContainer;
  VideoInfo mInfo;

  // Parser used for VP8 and VP9 decoding.
  AVCodecParserContext* mCodecParser;
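  // (Illustrative assumption, not stated by this header: such a parser is
  // usually obtained with av_parser_init() for the codec in use and fed input
  // through av_parser_parse2() so that packets are split into complete frames
  // before they reach the decoder.)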

  class PtsCorrectionContext {
  public:
    PtsCorrectionContext();
    int64_t GuessCorrectPts(int64_t aPts, int64_t aDts);
    void Reset();
    int64_t LastDts() const { return mLastDts; }

  private:
    int64_t mNumFaultyPts; /// Number of incorrect PTS values so far
    int64_t mNumFaultyDts; /// Number of incorrect DTS values so far
    int64_t mLastPts;      /// PTS of the last frame
    int64_t mLastDts;      /// DTS of the last frame
  };
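
  // GuessCorrectPts() is conventionally implemented along the lines of the
  // well-known ffplay heuristic sketched below; this is an assumption about
  // the .cpp implementation, not a copy of it. The idea is to count how often
  // PTS and DTS each run backwards and trust whichever has misbehaved less:
  //
  //   int64_t GuessCorrectPts(int64_t aPts, int64_t aDts)
  //   {
  //     if (aDts != AV_NOPTS_VALUE) {
  //       mNumFaultyDts += aDts <= mLastDts;
  //       mLastDts = aDts;
  //     }
  //     if (aPts != AV_NOPTS_VALUE) {
  //       mNumFaultyPts += aPts <= mLastPts;
  //       mLastPts = aPts;
  //     }
  //     if ((mNumFaultyPts <= mNumFaultyDts || aDts == AV_NOPTS_VALUE) &&
  //         aPts != AV_NOPTS_VALUE) {
  //       return aPts;
  //     }
  //     return aDts;
  //   }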

  PtsCorrectionContext mPtsContext;
  int64_t mLastInputDts;

  class DurationMap {
  public:
    typedef Pair<int64_t, int64_t> DurationElement;

    // Insert Key and Duration pair at the end of our map.
    void Insert(int64_t aKey, int64_t aDuration)
    {
      mMap.AppendElement(MakePair(aKey, aDuration));
    }

    // Sets aDuration to the duration matching aKey and removes that entry
    // from the map if found. The first matching element is used.
    // Returns true if found, false otherwise.
    bool Find(int64_t aKey, int64_t& aDuration)
    {
      for (uint32_t i = 0; i < mMap.Length(); i++) {
        DurationElement& element = mMap[i];
        if (element.first() == aKey) {
          aDuration = element.second();
          mMap.RemoveElementAt(i);
          return true;
        }
      }
      return false;
    }

    // Remove all elements of the map.
    void Clear()
    {
      mMap.Clear();
    }

  private:
    AutoTArray<DurationElement, 16> mMap;
  };
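
  // Illustrative usage (assumed, not taken from this header): a duration is
  // recorded when a sample is submitted for decoding and looked up again when
  // the matching frame is produced, keyed by timestamp. Names below are
  // hypothetical:
  //
  //   mDurationMap.Insert(aSample->mTimecode, aSample->mDuration);
  //   ...
  //   int64_t duration;
  //   if (!mDurationMap.Find(decodedTimecode, duration)) {
  //     duration = 0; // no entry recorded for this timestamp
  //   }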

  DurationMap mDurationMap;
};

} // namespace mozilla

#endif // __FFmpegVideoDecoder_h__