2014-01-15 06:21:14 +00:00
|
|
|
/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*-*/
|
|
|
|
/* This Source Code Form is subject to the terms of the Mozilla Public
|
|
|
|
* License, v. 2.0. If a copy of the MPL was not distributed with this file,
|
|
|
|
* You can obtain one at http://mozilla.org/MPL/2.0/. */
|
|
|
|
|
|
|
|
#include "VP8TrackEncoder.h"
|
2015-10-02 05:59:17 +00:00
|
|
|
#include "GeckoProfiler.h"
|
2015-10-02 05:59:17 +00:00
|
|
|
#include "LayersLogging.h"
|
2015-10-02 05:59:17 +00:00
|
|
|
#include "libyuv.h"
|
2015-10-02 05:59:17 +00:00
|
|
|
#include "mozilla/gfx/2D.h"
|
2015-10-02 05:59:17 +00:00
|
|
|
#include "prsystem.h"
|
2014-09-04 00:08:00 +00:00
|
|
|
#include "VideoSegment.h"
|
2014-01-15 06:21:14 +00:00
|
|
|
#include "VideoUtils.h"
|
2015-10-02 05:59:17 +00:00
|
|
|
#include "vpx/vp8cx.h"
|
|
|
|
#include "vpx/vpx_encoder.h"
|
2014-01-15 06:21:14 +00:00
|
|
|
#include "WebMWriter.h"
|
2016-08-23 07:45:23 +00:00
|
|
|
#include "mozilla/media/MediaUtils.h"
|
2017-07-23 21:13:01 +00:00
|
|
|
#include "mozilla/dom/ImageUtils.h"
|
|
|
|
#include "mozilla/dom/ImageBitmapBinding.h"
|
2014-01-15 06:21:14 +00:00
|
|
|
|
|
|
|
namespace mozilla {
|
|
|
|
|
2015-11-15 13:49:01 +00:00
|
|
|
LazyLogModule gVP8TrackEncoderLog("VP8TrackEncoder");
|
2017-01-18 14:01:47 +00:00
|
|
|
#define VP8LOG(level, msg, ...) MOZ_LOG(gVP8TrackEncoderLog, \
|
|
|
|
level, \
|
|
|
|
(msg, ##__VA_ARGS__))
|
2014-01-15 06:21:14 +00:00
|
|
|
|
2015-06-08 03:47:28 +00:00
|
|
|
#define DEFAULT_BITRATE_BPS 2500000
|
2014-01-15 06:21:14 +00:00
|
|
|
|
2015-10-02 05:59:17 +00:00
|
|
|
using namespace mozilla::gfx;
|
2014-02-21 08:35:13 +00:00
|
|
|
using namespace mozilla::layers;
|
2016-12-09 20:46:01 +00:00
|
|
|
using namespace mozilla::media;
|
2017-07-23 21:13:01 +00:00
|
|
|
using namespace mozilla::dom;
|
2016-12-09 20:46:01 +00:00
|
|
|
|
|
|
|
/**
 * Extracts a SourceSurface from aImg.
 *
 * GLImage::GetAsSourceSurface() may only be called on the main thread, so for
 * GL-backed images off the main thread we synchronously bounce the read-back
 * over to the main thread. Every other image type is read directly on the
 * calling thread.
 *
 * Returns nullptr if aImg is null or the surface could not be produced.
 */
static already_AddRefed<SourceSurface>
GetSourceSurface(already_AddRefed<Image> aImg)
{
  RefPtr<Image> image = aImg;
  if (!image) {
    return nullptr;
  }

  const bool needsMainThreadHop = image->AsGLImage() && !NS_IsMainThread();
  if (!needsMainThreadHop) {
    RefPtr<SourceSurface> surface = image->GetAsSourceSurface();
    return surface.forget();
  }

  // GLImage::GetAsSourceSurface() only supports main thread; dispatch the
  // read-back there and block until it completes.
  RefPtr<SourceSurface> surface;
  RefPtr<Runnable> task = NewRunnableFrom([image, &surface]() -> nsresult {
    surface = image->GetAsSourceSurface();
    return NS_OK;
  });
  NS_DispatchToMainThread(task, NS_DISPATCH_SYNC);
  return surface.forget();
}
|
2014-02-21 08:35:13 +00:00
|
|
|
|
2017-10-19 12:38:07 +00:00
|
|
|
/**
 * Constructs a VP8 track encoder. The vpx codec context and image wrapper are
 * allocated up front but remain unconfigured until Init() is called;
 * mEncodedTimestamp tracks the running timestamp (in track-rate units) of
 * frames handed to the encoder.
 */
VP8TrackEncoder::VP8TrackEncoder(TrackRate aTrackRate,
                                 FrameDroppingMode aFrameDroppingMode)
  : VideoTrackEncoder(aTrackRate, aFrameDroppingMode)
  , mEncodedTimestamp(0)
  , mVPXContext(new vpx_codec_ctx_t())
  , mVPXImageWrapper(new vpx_image_t())
{
  MOZ_COUNT_CTOR(VP8TrackEncoder);
}
|
|
|
|
|
|
|
|
VP8TrackEncoder::~VP8TrackEncoder()
{
  // Tear down the codec context and image wrapper (idempotent — Destroy()
  // guards against a never-initialized or already-destroyed encoder).
  Destroy();
  MOZ_COUNT_DTOR(VP8TrackEncoder);
}
|
|
|
|
|
|
|
|
void
|
|
|
|
VP8TrackEncoder::Destroy()
|
|
|
|
{
|
2014-01-15 06:21:14 +00:00
|
|
|
if (mInitialized) {
|
|
|
|
vpx_codec_destroy(mVPXContext);
|
|
|
|
}
|
|
|
|
|
|
|
|
if (mVPXImageWrapper) {
|
|
|
|
vpx_img_free(mVPXImageWrapper);
|
|
|
|
}
|
2017-04-07 10:13:52 +00:00
|
|
|
mInitialized = false;
|
2014-01-15 06:21:14 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
/**
 * Initializes the VP8 encoder for the given frame and display dimensions.
 *
 * @param aWidth          Encoded frame width in pixels (must be >= 1).
 * @param aHeight         Encoded frame height in pixels (must be >= 1).
 * @param aDisplayWidth   Intended display width (must be >= 1).
 * @param aDisplayHeight  Intended display height (must be >= 1).
 * @return NS_OK on success; NS_ERROR_FAILURE on bad dimensions, double-init,
 *         configuration failure, or codec init failure.
 */
nsresult
VP8TrackEncoder::Init(int32_t aWidth, int32_t aHeight, int32_t aDisplayWidth,
                      int32_t aDisplayHeight)
{
  if (aWidth < 1 || aHeight < 1 || aDisplayWidth < 1 || aDisplayHeight < 1) {
    return NS_ERROR_FAILURE;
  }

  // Initializing twice is a programming error — Reconfigure() is the API for
  // an already-running encoder.
  if (mInitialized) {
    MOZ_ASSERT(false);
    return NS_ERROR_FAILURE;
  }

  // Build the encoder configuration (also records the frame/display sizes in
  // our members).
  vpx_codec_enc_cfg_t config;
  nsresult rv = SetConfigurationValues(aWidth, aHeight, aDisplayWidth, aDisplayHeight, config);
  NS_ENSURE_SUCCESS(rv, NS_ERROR_FAILURE);

  // Creating a wrapper to the image - setting image data to NULL. Actual
  // pointer will be set in encode. Setting align to 1, as it is meaningless
  // (actual memory is not allocated).
  vpx_img_wrap(mVPXImageWrapper, VPX_IMG_FMT_I420,
               mFrameWidth, mFrameHeight, 1, nullptr);

  const vpx_codec_flags_t flags = VPX_CODEC_USE_OUTPUT_PARTITION;
  if (vpx_codec_enc_init(mVPXContext, vpx_codec_vp8_cx(), &config, flags)) {
    return NS_ERROR_FAILURE;
  }

  // Tune the encoder for real-time use: skip encoding of nearly-static
  // macroblocks, favor speed over quality, and emit a single token partition.
  vpx_codec_control(mVPXContext, VP8E_SET_STATIC_THRESHOLD, 1);
  vpx_codec_control(mVPXContext, VP8E_SET_CPUUSED, -6);
  vpx_codec_control(mVPXContext, VP8E_SET_TOKEN_PARTITIONS,
                    VP8_ONE_TOKENPARTITION);

  SetInitialized();

  return NS_OK;
}
|
|
|
|
|
|
|
|
/**
 * Reconfigures an already-initialized encoder for a new frame size without
 * destroying the codec context. Only valid after Init(); callers are expected
 * to use this when the new frame fits within the existing buffer (see
 * PrepareRawFrame), otherwise Destroy() + Init() is required.
 *
 * @return NS_OK on success; NS_ERROR_FAILURE on bad dimensions, an
 *         uninitialized encoder, or a libvpx configuration error.
 */
nsresult
VP8TrackEncoder::Reconfigure(int32_t aWidth, int32_t aHeight,
                             int32_t aDisplayWidth, int32_t aDisplayHeight)
{
  if (aWidth <= 0 || aHeight <= 0 || aDisplayWidth <= 0 || aDisplayHeight <= 0) {
    MOZ_ASSERT(false);
    return NS_ERROR_FAILURE;
  }

  if (!mInitialized) {
    MOZ_ASSERT(false);
    return NS_ERROR_FAILURE;
  }

  // Recreate the image wrapper at the new dimensions (no pixel memory is
  // allocated; plane pointers are filled in per frame).
  vpx_img_free(mVPXImageWrapper);
  vpx_img_wrap(mVPXImageWrapper, VPX_IMG_FMT_I420, aWidth, aHeight, 1, nullptr);

  // Rebuild the encoder configuration for the new sizes.
  vpx_codec_enc_cfg_t config;
  nsresult rv = SetConfigurationValues(aWidth, aHeight, aDisplayWidth, aDisplayHeight, config);
  NS_ENSURE_SUCCESS(rv, NS_ERROR_FAILURE);

  // Apply the new configuration to the live codec context.
  if (vpx_codec_enc_config_set(mVPXContext.get(), &config) != VPX_CODEC_OK) {
    VP8LOG(LogLevel::Error, "Failed to set new configuration");
    return NS_ERROR_FAILURE;
  }

  return NS_OK;
}
|
2014-01-15 06:21:14 +00:00
|
|
|
|
2017-04-07 10:13:52 +00:00
|
|
|
/**
 * Records the frame/display dimensions in member state and fills |config|
 * with the encoder settings used by both Init() and Reconfigure().
 *
 * @param aWidth/aHeight                 Encoded frame size; stored in
 *                                       mFrameWidth/mFrameHeight.
 * @param aDisplayWidth/aDisplayHeight   Display size; stored in
 *                                       mDisplayWidth/mDisplayHeight.
 * @param config                         Out-parameter; overwritten entirely.
 * @return NS_OK, or NS_ERROR_FAILURE if libvpx cannot supply defaults.
 */
nsresult
VP8TrackEncoder::SetConfigurationValues(int32_t aWidth, int32_t aHeight, int32_t aDisplayWidth,
                                        int32_t aDisplayHeight, vpx_codec_enc_cfg_t& config)
{
  mFrameWidth = aWidth;
  mFrameHeight = aHeight;
  mDisplayWidth = aDisplayWidth;
  mDisplayHeight = aDisplayHeight;

  // Start from libvpx's defaults and override what we need.
  memset(&config, 0, sizeof(vpx_codec_enc_cfg_t));
  if (vpx_codec_enc_config_default(vpx_codec_vp8_cx(), &config, 0)) {
    VP8LOG(LogLevel::Error, "Failed to get default configuration");
    return NS_ERROR_FAILURE;
  }

  config.g_w = mFrameWidth;
  config.g_h = mFrameHeight;

  // TODO: Maybe we should have various aFrameRate bitrate pair for each devices?
  // or for different platform

  // rc_target_bitrate needs kbit/s
  config.rc_target_bitrate = (mVideoBitrate != 0 ? mVideoBitrate : DEFAULT_BITRATE_BPS)/1000;

  // Timestamps handed to vpx_codec_encode are expressed in track-rate ticks.
  config.g_timebase.num = 1;
  config.g_timebase.den = mTrackRate;

  config.g_error_resilient = 0;

  config.g_lag_in_frames = 0; // 0- no frame lagging

  // Scale the thread count with resolution, but never beyond what the host
  // can reasonably spare.
  const int32_t coreCount = PR_GetNumberOfProcessors();
  const int32_t pixelCount = mFrameWidth * mFrameHeight;
  if (pixelCount > 1280 * 960 && coreCount >= 6) {
    config.g_threads = 3; // 3 threads for 1080p.
  } else if (pixelCount > 640 * 480 && coreCount >= 3) {
    config.g_threads = 2; // 2 threads for qHD/HD.
  } else {
    config.g_threads = 1; // 1 thread for VGA or less
  }

  // rate control settings
  config.rc_dropframe_thresh = 0;
  config.rc_end_usage = VPX_VBR;
  config.g_pass = VPX_RC_ONE_PASS;
  // ffmpeg doesn't currently support streams that use resize.
  // Therefore, for safety, we should turn it off until it does.
  config.rc_resize_allowed = 0;
  config.rc_undershoot_pct = 100;
  config.rc_overshoot_pct = 15;
  config.rc_buf_initial_sz = 500;
  config.rc_buf_optimal_sz = 600;
  config.rc_buf_sz = 1000;

  config.kf_mode = VPX_KF_AUTO;
  // Ensure that we can output one I-frame per second.
  config.kf_max_dist = 60;

  return NS_OK;
}
|
|
|
|
|
|
|
|
/**
 * Returns the track metadata (frame and display dimensions) for the muxer.
 *
 * Only meaningful once Init() has completed; returns nullptr if the encoder
 * was canceled, has finished encoding, or was never initialized.
 */
already_AddRefed<TrackMetadataBase>
VP8TrackEncoder::GetMetadata()
{
  AUTO_PROFILER_LABEL("VP8TrackEncoder::GetMetadata", OTHER);

  MOZ_ASSERT(mInitialized || mCanceled);

  if (mCanceled || mEncodingComplete) {
    return nullptr;
  }

  if (!mInitialized) {
    return nullptr;
  }

  RefPtr<VP8Metadata> meta = new VP8Metadata();
  meta->mWidth = mFrameWidth;
  meta->mHeight = mFrameHeight;
  meta->mDisplayWidth = mDisplayWidth;
  meta->mDisplayHeight = mDisplayHeight;

  // Fixed typo in the log output: "displayWidht" -> "displayWidth".
  VP8LOG(LogLevel::Info, "GetMetadata() width=%d, height=%d, "
         "displayWidth=%d, displayHeight=%d",
         meta->mWidth, meta->mHeight, meta->mDisplayWidth, meta->mDisplayHeight);

  return meta.forget();
}
|
|
|
|
|
2017-01-24 15:19:00 +00:00
|
|
|
/**
 * Drains one encoded frame's worth of output packets from libvpx into aData.
 *
 * Frame packets are accumulated until a packet without the FRAGMENT flag
 * marks the end of the frame. The frame's timestamp and duration are
 * converted from track-rate ticks to microseconds; mExtractedDuration /
 * mExtractedDurationUs carry the rounding remainder across calls.
 *
 * @return NS_OK when a packet was consumed, NS_ERROR_NOT_AVAILABLE when the
 *         encoder had no more output, or an overflow error code.
 */
nsresult
VP8TrackEncoder::GetEncodedPartitions(EncodedFrameContainer& aData)
{
  vpx_codec_iter_t packetIter = nullptr;
  EncodedFrame::FrameType frameType = EncodedFrame::VP8_P_FRAME;
  nsTArray<uint8_t> payload;
  const vpx_codec_cx_pkt_t* packet = nullptr;
  while ((packet = vpx_codec_get_cx_data(mVPXContext, &packetIter)) != nullptr) {
    switch (packet->kind) {
      case VPX_CODEC_CX_FRAME_PKT: {
        // Accumulate the encoded bytes of this (possibly fragmented) frame.
        payload.AppendElements((uint8_t*)packet->data.frame.buf,
                               packet->data.frame.sz);
        break;
      }
      default: {
        break;
      }
    }
    // A packet without the fragment flag terminates the frame.
    if ((packet->data.frame.flags & VPX_FRAME_IS_FRAGMENT) == 0) {
      if (packet->data.frame.flags & VPX_FRAME_IS_KEY) {
        frameType = EncodedFrame::VP8_I_FRAME;
      }
      break;
    }
  }

  // NOTE(review): if the iterator ran dry while only fragments were seen,
  // |packet| would be null here with a non-empty |payload|, and the
  // dereference below would crash. Presumably libvpx always terminates a
  // frame's fragments within one drain — confirm against the libvpx docs.
  if (!payload.IsEmpty()) {
    // Wrap the encoded bytes in an EncodedFrame for the container.
    EncodedFrame* frame = new EncodedFrame();
    frame->SetFrameType(frameType);

    // Convert the timestamp from track-rate ticks to microseconds.
    CheckedInt64 ptsUsecs = FramesToUsecs(packet->data.frame.pts, mTrackRate);
    if (!ptsUsecs.isValid()) {
      NS_ERROR("Microsecond timestamp overflow");
      return NS_ERROR_DOM_MEDIA_OVERFLOW_ERR;
    }
    frame->SetTimeStamp((uint64_t)ptsUsecs.value());

    // Track the total extracted duration in ticks...
    mExtractedDuration += packet->data.frame.duration;
    if (!mExtractedDuration.isValid()) {
      NS_ERROR("Duration overflow");
      return NS_ERROR_DOM_MEDIA_OVERFLOW_ERR;
    }

    // ...and in microseconds, so per-frame durations don't accumulate
    // rounding error.
    CheckedInt64 extractedUsecs =
      FramesToUsecs(mExtractedDuration.value(), mTrackRate);
    if (!extractedUsecs.isValid()) {
      NS_ERROR("Duration overflow");
      return NS_ERROR_DOM_MEDIA_OVERFLOW_ERR;
    }

    CheckedInt64 frameUsecs = extractedUsecs - mExtractedDurationUs;
    if (!frameUsecs.isValid()) {
      NS_ERROR("Duration overflow");
      return NS_ERROR_DOM_MEDIA_OVERFLOW_ERR;
    }

    mExtractedDurationUs = extractedUsecs;
    frame->SetDuration((uint64_t)frameUsecs.value());
    frame->SwapInFrameData(payload);
    VP8LOG(LogLevel::Verbose,
           "GetEncodedPartitions TimeStamp %" PRIu64 ", Duration %" PRIu64 ", FrameType %d",
           frame->GetTimeStamp(), frame->GetDuration(),
           frame->GetFrameType());
    aData.AppendEncodedFrame(frame);
  }

  return packet ? NS_OK : NS_ERROR_NOT_AVAILABLE;
}
|
|
|
|
|
|
|
|
/**
 * Points mVPXImageWrapper's I420 planes at the pixel data for aChunk.
 *
 * Null or forced-black chunks are replaced by a cached black frame. If the
 * incoming image's size differs from the encoder's, the encoder is either
 * reconfigured (new size fits in the existing buffer) or destroyed and
 * re-initialized. YUV420P images are used zero-copy; every other format is
 * converted into the reusable mI420Frame buffer via libyuv.
 *
 * @return NS_OK on success (including the black-frame-allocation-failed
 *         best-effort path), NS_ERROR_FAILURE / NS_ERROR_NOT_IMPLEMENTED on
 *         invalid or unsupported input.
 */
nsresult VP8TrackEncoder::PrepareRawFrame(VideoChunk &aChunk)
{
  RefPtr<Image> image;
  if (aChunk.mFrame.GetForceBlack() || aChunk.IsNull()) {
    // Lazily create (and then reuse) a black frame for null/muted chunks.
    if (!mMuteFrame) {
      mMuteFrame = VideoFrame::CreateBlackImage(gfx::IntSize(mFrameWidth, mFrameHeight));
    }
    if (!mMuteFrame) {
      VP8LOG(LogLevel::Warning, "Failed to allocate black image of size %dx%d",
             mFrameWidth, mFrameHeight);
      // Best effort: skip this frame rather than failing the whole encode.
      return NS_OK;
    }
    image = mMuteFrame;
  } else {
    image = aChunk.mFrame.GetImage();
  }

  if (image->GetSize() != IntSize(mFrameWidth, mFrameHeight)) {
    VP8LOG(LogLevel::Info,
           "Dynamic resolution change (was %dx%d, now %dx%d).",
           mFrameWidth, mFrameHeight, image->GetSize().width, image->GetSize().height);

    gfx::IntSize intrinsicSize = aChunk.mFrame.GetIntrinsicSize();
    gfx::IntSize imgSize = aChunk.mFrame.GetImage()->GetSize();
    // If the new size fits in the old buffer the running encoder instance can
    // simply be reconfigured; otherwise tear it down and start over.
    if (imgSize <= IntSize(mFrameWidth, mFrameHeight) && // check buffer size instead
        NS_SUCCEEDED(Reconfigure(imgSize.width,
                                 imgSize.height,
                                 intrinsicSize.width,
                                 intrinsicSize.height))) {
      VP8LOG(LogLevel::Info, "Reconfigured VP8 encoder.");
    } else {
      Destroy();
      nsresult rv = Init(imgSize.width,
                         imgSize.height,
                         intrinsicSize.width,
                         intrinsicSize.height);
      VP8LOG(LogLevel::Info, "Recreated VP8 encoder.");
      NS_ENSURE_SUCCESS(rv, rv);
    }
  }

  ImageFormat format = image->GetFormat();
  if (format == ImageFormat::PLANAR_YCBCR) {
    PlanarYCbCrImage* yuvImage = static_cast<PlanarYCbCrImage *>(image.get());

    MOZ_RELEASE_ASSERT(yuvImage);
    if (!yuvImage->IsValid()) {
      NS_WARNING("PlanarYCbCrImage is not valid");
      return NS_ERROR_FAILURE;
    }

    // The ImageUtils API may change depending on our support for ImageBitmap
    // extensions. Should this happen in a breaking way we should abstract out
    // the format detection for use here.
    const ImageUtils imageUtils(image);
    const ImageBitmapFormat imageBitmapFormat = imageUtils.GetFormat();

    if (imageBitmapFormat == ImageBitmapFormat::YUV420P) {
      // Already 420 planar — point the wrapper straight at the image planes,
      // no conversion or copy required.
      const PlanarYCbCrImage::Data* planarData = yuvImage->GetData();
      mVPXImageWrapper->planes[VPX_PLANE_Y] = planarData->mYChannel;
      mVPXImageWrapper->planes[VPX_PLANE_U] = planarData->mCbChannel;
      mVPXImageWrapper->planes[VPX_PLANE_V] = planarData->mCrChannel;
      mVPXImageWrapper->stride[VPX_PLANE_Y] = planarData->mYStride;
      mVPXImageWrapper->stride[VPX_PLANE_U] = planarData->mCbCrStride;
      mVPXImageWrapper->stride[VPX_PLANE_V] = planarData->mCbCrStride;

      return NS_OK;
    }
  }

  // Not 420 planar: convert into our reusable I420 buffer.
  uint32_t yPlaneSize = mFrameWidth * mFrameHeight;
  uint32_t halfWidth = (mFrameWidth + 1) / 2;
  uint32_t halfHeight = (mFrameHeight + 1) / 2;
  uint32_t uvPlaneSize = halfWidth * halfHeight;

  // Resize the scratch buffer only when the frame size changed.
  if (mI420Frame.Length() != yPlaneSize + uvPlaneSize * 2) {
    mI420Frame.SetLength(yPlaneSize + uvPlaneSize * 2);
  }

  uint8_t *yPlane = mI420Frame.Elements();
  uint8_t *cbPlane = mI420Frame.Elements() + yPlaneSize;
  uint8_t *crPlane = mI420Frame.Elements() + yPlaneSize + uvPlaneSize;

  if (format == ImageFormat::PLANAR_YCBCR) {
    // Planar, but some subsampling other than 4:2:0 — convert with libyuv.
    PlanarYCbCrImage* yuvImage = static_cast<PlanarYCbCrImage *>(image.get());

    MOZ_RELEASE_ASSERT(yuvImage);
    if (!yuvImage->IsValid()) {
      NS_WARNING("PlanarYCbCrImage is not valid");
      return NS_ERROR_FAILURE;
    }

    const ImageUtils imageUtils(image);
    const ImageBitmapFormat imageBitmapFormat = imageUtils.GetFormat();
    const PlanarYCbCrImage::Data *planarData = yuvImage->GetData();

    int convertStatus;
    std::string yuvFormat;
    if (imageBitmapFormat == ImageBitmapFormat::YUV420SP_NV12) {
      convertStatus = libyuv::NV12ToI420(planarData->mYChannel,
                                         planarData->mYStride,
                                         planarData->mCbChannel,
                                         planarData->mCbCrStride,
                                         yPlane,
                                         mFrameWidth,
                                         cbPlane,
                                         halfWidth,
                                         crPlane,
                                         halfWidth,
                                         mFrameWidth,
                                         mFrameHeight);
      yuvFormat = "NV12";
    } else if (imageBitmapFormat == ImageBitmapFormat::YUV420SP_NV21) {
      convertStatus = libyuv::NV21ToI420(planarData->mYChannel,
                                         planarData->mYStride,
                                         planarData->mCrChannel,
                                         planarData->mCbCrStride,
                                         yPlane,
                                         mFrameWidth,
                                         cbPlane,
                                         halfWidth,
                                         crPlane,
                                         halfWidth,
                                         mFrameWidth,
                                         mFrameHeight);
      yuvFormat = "NV21";
    } else if (imageBitmapFormat == ImageBitmapFormat::YUV444P) {
      convertStatus = libyuv::I444ToI420(planarData->mYChannel,
                                         planarData->mYStride,
                                         planarData->mCbChannel,
                                         planarData->mCbCrStride,
                                         planarData->mCrChannel,
                                         planarData->mCbCrStride,
                                         yPlane,
                                         mFrameWidth,
                                         cbPlane,
                                         halfWidth,
                                         crPlane,
                                         halfWidth,
                                         mFrameWidth,
                                         mFrameHeight);
      yuvFormat = "I444";
    } else if (imageBitmapFormat == ImageBitmapFormat::YUV422P) {
      convertStatus = libyuv::I422ToI420(planarData->mYChannel,
                                         planarData->mYStride,
                                         planarData->mCbChannel,
                                         planarData->mCbCrStride,
                                         planarData->mCrChannel,
                                         planarData->mCbCrStride,
                                         yPlane,
                                         mFrameWidth,
                                         cbPlane,
                                         halfWidth,
                                         crPlane,
                                         halfWidth,
                                         mFrameWidth,
                                         mFrameHeight);
      yuvFormat = "I422";
    } else {
      VP8LOG(LogLevel::Error, "Unsupported planar format");
      NS_ASSERTION(false, "Unsupported planar format");
      return NS_ERROR_NOT_IMPLEMENTED;
    }

    if (convertStatus != 0) {
      VP8LOG(LogLevel::Error, "Converting an %s frame to I420 failed", yuvFormat.c_str());
      return NS_ERROR_FAILURE;
    }

    VP8LOG(LogLevel::Verbose, "Converted an %s frame to I420", yuvFormat.c_str());
  } else {
    // Not YCbCr at all. Try to get access to the raw data and convert.

    RefPtr<SourceSurface> surface = GetSourceSurface(image.forget());
    if (!surface) {
      VP8LOG(LogLevel::Error, "Getting surface from %s image failed", Stringify(format).c_str());
      return NS_ERROR_FAILURE;
    }

    RefPtr<DataSourceSurface> dataSurface = surface->GetDataSurface();
    if (!dataSurface) {
      VP8LOG(LogLevel::Error, "Getting data surface from %s image with %s (%s) surface failed",
             Stringify(format).c_str(), Stringify(surface->GetType()).c_str(),
             Stringify(surface->GetFormat()).c_str());
      return NS_ERROR_FAILURE;
    }

    DataSourceSurface::ScopedMap surfaceMap(dataSurface, DataSourceSurface::READ);
    if (!surfaceMap.IsMapped()) {
      VP8LOG(LogLevel::Error, "Reading DataSourceSurface from %s image with %s (%s) surface failed",
             Stringify(format).c_str(), Stringify(surface->GetType()).c_str(),
             Stringify(surface->GetFormat()).c_str());
      return NS_ERROR_FAILURE;
    }

    int convertStatus;
    switch (surface->GetFormat()) {
      case SurfaceFormat::B8G8R8A8:
      case SurfaceFormat::B8G8R8X8:
        convertStatus = libyuv::ARGBToI420(static_cast<uint8*>(surfaceMap.GetData()),
                                           surfaceMap.GetStride(),
                                           yPlane, mFrameWidth,
                                           cbPlane, halfWidth,
                                           crPlane, halfWidth,
                                           mFrameWidth, mFrameHeight);
        break;
      case SurfaceFormat::R5G6B5_UINT16:
        convertStatus = libyuv::RGB565ToI420(static_cast<uint8*>(surfaceMap.GetData()),
                                             surfaceMap.GetStride(),
                                             yPlane, mFrameWidth,
                                             cbPlane, halfWidth,
                                             crPlane, halfWidth,
                                             mFrameWidth, mFrameHeight);
        break;
      default:
        VP8LOG(LogLevel::Error, "Unsupported SourceSurface format %s",
               Stringify(surface->GetFormat()).c_str());
        NS_ASSERTION(false, "Unsupported SourceSurface format");
        return NS_ERROR_NOT_IMPLEMENTED;
    }

    if (convertStatus != 0) {
      VP8LOG(LogLevel::Error, "%s to I420 conversion failed",
             Stringify(surface->GetFormat()).c_str());
      return NS_ERROR_FAILURE;
    }

    VP8LOG(LogLevel::Verbose, "Converted a %s frame to I420",
           Stringify(surface->GetFormat()).c_str());
  }

  // Point the wrapper at the converted scratch buffer.
  mVPXImageWrapper->planes[VPX_PLANE_Y] = yPlane;
  mVPXImageWrapper->planes[VPX_PLANE_U] = cbPlane;
  mVPXImageWrapper->planes[VPX_PLANE_V] = crPlane;
  mVPXImageWrapper->stride[VPX_PLANE_Y] = mFrameWidth;
  mVPXImageWrapper->stride[VPX_PLANE_U] = halfWidth;
  mVPXImageWrapper->stride[VPX_PLANE_V] = halfWidth;

  return NS_OK;
}
|
|
|
|
|
|
|
|
// These two define value used in GetNextEncodeOperation to determine the
|
|
|
|
// EncodeOperation for next target frame.
|
|
|
|
#define I_FRAME_RATIO (0.5)
|
|
|
|
#define SKIP_FRAME_RATIO (0.75)
|
|
|
|
|
|
|
|
/**
|
|
|
|
* Compares the elapsed time from the beginning of GetEncodedTrack and
|
|
|
|
* the processed frame duration in mSourceSegment
|
|
|
|
* in order to set the nextEncodeOperation for next target frame.
|
|
|
|
*/
|
|
|
|
VP8TrackEncoder::EncodeOperation
|
|
|
|
VP8TrackEncoder::GetNextEncodeOperation(TimeDuration aTimeElapsed,
|
2014-09-18 05:20:43 +00:00
|
|
|
StreamTime aProcessedDuration)
|
2014-01-15 06:21:14 +00:00
|
|
|
{
|
2017-10-19 12:38:07 +00:00
|
|
|
if (mFrameDroppingMode == FrameDroppingMode::DISALLOW) {
|
|
|
|
return ENCODE_NORMAL_FRAME;
|
|
|
|
}
|
|
|
|
|
2014-01-15 06:21:14 +00:00
|
|
|
int64_t durationInUsec =
|
2017-01-12 11:19:34 +00:00
|
|
|
FramesToUsecs(aProcessedDuration, mTrackRate).value();
|
2014-01-15 06:21:14 +00:00
|
|
|
if (aTimeElapsed.ToMicroseconds() > (durationInUsec * SKIP_FRAME_RATIO)) {
|
|
|
|
// The encoder is too slow.
|
|
|
|
// We should skip next frame to consume the mSourceSegment.
|
|
|
|
return SKIP_FRAME;
|
|
|
|
} else if (aTimeElapsed.ToMicroseconds() > (durationInUsec * I_FRAME_RATIO)) {
|
|
|
|
// The encoder is a little slow.
|
|
|
|
// We force the encoder to encode an I-frame to accelerate.
|
|
|
|
return ENCODE_I_FRAME;
|
|
|
|
} else {
|
|
|
|
return ENCODE_NORMAL_FRAME;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
/**
|
|
|
|
* Encoding flow in GetEncodedTrack():
|
|
|
|
* 1: Check the mInitialized state and the packet duration.
|
|
|
|
* 2: Move the data from mRawSegment to mSourceSegment.
|
|
|
|
* 3: Encode the video chunks in mSourceSegment in a for-loop.
|
2017-01-12 11:19:34 +00:00
|
|
|
* 3.1: The duration is taken straight from the video chunk's duration.
|
|
|
|
* 3.2: Setup the video chunk with mVPXImageWrapper by PrepareRawFrame().
|
|
|
|
* 3.3: Pass frame to vp8 encoder by vpx_codec_encode().
|
|
|
|
* 3.4: Get the encoded frame from encoder by GetEncodedPartitions().
|
|
|
|
* 3.5: Set the nextEncodeOperation for the next target frame.
|
2014-01-15 06:21:14 +00:00
|
|
|
* There is a heuristic: If the frame duration we have processed in
|
|
|
|
* mSourceSegment is 100ms, means that we can't spend more than 100ms to
|
|
|
|
* encode it.
|
|
|
|
* 4. Remove the encoded chunks in mSourceSegment after for-loop.
|
|
|
|
*/
|
|
|
|
nsresult
|
|
|
|
VP8TrackEncoder::GetEncodedTrack(EncodedFrameContainer& aData)
|
|
|
|
{
|
Bug 1375392 - Tweak the PROFILER_LABEL* macros. r=mstange.
This patch makes the following changes to the macros.
- Removes PROFILER_LABEL_FUNC. It's only suitable for use in functions outside
classes, due to PROFILER_FUNCTION_NAME not getting class names, and it was
mostly misused.
- Removes PROFILER_FUNCTION_NAME. It's no longer used, and __func__ is
universally available now anyway.
- Combines the first two string literal arguments of PROFILER_LABEL and
PROFILER_LABEL_DYNAMIC into a single argument. There was no good reason for
them to be separate, and it forced a '::' in the label, which isn't always
appropriate. Also, the meaning of the "name_space" argument was interpreted
in an interesting variety of ways.
- Adds an "AUTO_" prefix to PROFILER_LABEL and PROFILER_LABEL_DYNAMIC, to make
it clearer they construct RAII objects rather than just being function calls.
(I myself have screwed up the scoping because of this in the past.)
- Fills in the 'js::ProfileEntry::Category::' qualifier within the macro, so
the caller doesn't need to. This makes a *lot* more of the uses fit onto a
single line.
The patch also makes the following changes to the macro uses (beyond those
required by the changes described above).
- Fixes a bunch of labels that had gotten out of sync with the name of the
class and/or function that encloses them.
- Removes a useless PROFILER_LABEL use within a trivial scope in
EventStateManager::DispatchMouseOrPointerEvent(). It clearly wasn't serving
any useful purpose. It also serves as extra evidence that the AUTO_ prefix is
a good idea.
- Tweaks DecodePool::SyncRunIf{Preferred,Possible} so that the labelling is
done within them, instead of at their callsites, because that's a more
standard way of doing things.
--HG--
extra : rebase_source : 318d1bc6fc1425a94aacbf489dd46e4f83211de4
2017-06-22 07:08:53 +00:00
|
|
|
AUTO_PROFILER_LABEL("VP8TrackEncoder::GetEncodedTrack", OTHER);
|
2017-05-24 16:51:47 +00:00
|
|
|
|
|
|
|
MOZ_ASSERT(mInitialized || mCanceled);
|
|
|
|
|
|
|
|
if (mCanceled || mEncodingComplete) {
|
|
|
|
return NS_ERROR_FAILURE;
|
2014-01-15 06:21:14 +00:00
|
|
|
}
|
|
|
|
|
2017-05-24 16:51:47 +00:00
|
|
|
if (!mInitialized) {
|
|
|
|
return NS_ERROR_FAILURE;
|
|
|
|
}
|
|
|
|
|
|
|
|
TakeTrackData(mSourceSegment);
|
|
|
|
|
2014-09-18 05:20:43 +00:00
|
|
|
StreamTime totalProcessedDuration = 0;
|
2014-01-15 06:21:14 +00:00
|
|
|
TimeStamp timebase = TimeStamp::Now();
|
|
|
|
EncodeOperation nextEncodeOperation = ENCODE_NORMAL_FRAME;
|
|
|
|
|
2017-01-12 11:19:34 +00:00
|
|
|
for (VideoSegment::ChunkIterator iter(mSourceSegment);
|
|
|
|
!iter.IsEnded(); iter.Next()) {
|
2014-01-15 06:21:14 +00:00
|
|
|
VideoChunk &chunk = *iter;
|
2016-12-16 03:16:31 +00:00
|
|
|
VP8LOG(LogLevel::Verbose, "nextEncodeOperation is %d for frame of duration %" PRId64,
|
2017-01-18 14:01:47 +00:00
|
|
|
nextEncodeOperation, chunk.GetDuration());
|
2017-01-12 11:19:34 +00:00
|
|
|
|
|
|
|
// Encode frame.
|
|
|
|
if (nextEncodeOperation != SKIP_FRAME) {
|
|
|
|
nsresult rv = PrepareRawFrame(chunk);
|
|
|
|
NS_ENSURE_SUCCESS(rv, NS_ERROR_FAILURE);
|
|
|
|
|
|
|
|
// Encode the data with VP8 encoder
|
2017-01-18 14:01:47 +00:00
|
|
|
int flags = 0;
|
|
|
|
if (nextEncodeOperation == ENCODE_I_FRAME) {
|
|
|
|
VP8LOG(LogLevel::Warning, "MediaRecorder lagging behind. Encoding keyframe.");
|
|
|
|
flags |= VPX_EFLAG_FORCE_KF;
|
|
|
|
}
|
2017-01-12 11:19:34 +00:00
|
|
|
if (vpx_codec_encode(mVPXContext, mVPXImageWrapper, mEncodedTimestamp,
|
|
|
|
(unsigned long)chunk.GetDuration(), flags,
|
|
|
|
VPX_DL_REALTIME)) {
|
2017-04-07 10:13:52 +00:00
|
|
|
VP8LOG(LogLevel::Error, "vpx_codec_encode failed to encode the frame.");
|
2017-01-12 11:19:34 +00:00
|
|
|
return NS_ERROR_FAILURE;
|
2014-01-15 06:21:14 +00:00
|
|
|
}
|
2017-01-12 11:19:34 +00:00
|
|
|
// Get the encoded data from VP8 encoder.
|
2017-01-24 15:19:00 +00:00
|
|
|
rv = GetEncodedPartitions(aData);
|
|
|
|
NS_ENSURE_SUCCESS(rv, NS_ERROR_FAILURE);
|
2017-01-12 11:19:34 +00:00
|
|
|
} else {
|
|
|
|
// SKIP_FRAME
|
|
|
|
// Extend the duration of the last encoded data in aData
|
2017-01-20 14:47:04 +00:00
|
|
|
// because this frame will be skipped.
|
2017-01-18 14:01:47 +00:00
|
|
|
VP8LOG(LogLevel::Warning, "MediaRecorder lagging behind. Skipping a frame.");
|
2017-01-12 11:19:34 +00:00
|
|
|
RefPtr<EncodedFrame> last = aData.GetEncodedFrames().LastElement();
|
|
|
|
if (last) {
|
2017-01-24 15:19:00 +00:00
|
|
|
mExtractedDuration += chunk.mDuration;
|
|
|
|
if (!mExtractedDuration.isValid()) {
|
|
|
|
NS_ERROR("skipped duration overflow");
|
|
|
|
return NS_ERROR_DOM_MEDIA_OVERFLOW_ERR;
|
|
|
|
}
|
|
|
|
|
|
|
|
CheckedInt64 totalDuration = FramesToUsecs(mExtractedDuration.value(), mTrackRate);
|
|
|
|
CheckedInt64 skippedDuration = totalDuration - mExtractedDurationUs;
|
|
|
|
mExtractedDurationUs = totalDuration;
|
|
|
|
if (!skippedDuration.isValid()) {
|
|
|
|
NS_ERROR("skipped duration overflow");
|
|
|
|
return NS_ERROR_DOM_MEDIA_OVERFLOW_ERR;
|
2017-01-20 14:47:04 +00:00
|
|
|
}
|
2017-01-24 15:19:00 +00:00
|
|
|
last->SetDuration(last->GetDuration() +
|
|
|
|
(static_cast<uint64_t>(skippedDuration.value())));
|
2014-01-15 06:21:14 +00:00
|
|
|
}
|
|
|
|
}
|
2017-01-12 11:19:34 +00:00
|
|
|
|
|
|
|
// Move forward the mEncodedTimestamp.
|
|
|
|
mEncodedTimestamp += chunk.GetDuration();
|
|
|
|
totalProcessedDuration += chunk.GetDuration();
|
|
|
|
|
|
|
|
// Check what to do next.
|
|
|
|
TimeDuration elapsedTime = TimeStamp::Now() - timebase;
|
|
|
|
nextEncodeOperation = GetNextEncodeOperation(elapsedTime,
|
|
|
|
totalProcessedDuration);
|
2014-01-15 06:21:14 +00:00
|
|
|
}
|
2017-01-12 11:19:34 +00:00
|
|
|
|
2014-01-15 06:21:14 +00:00
|
|
|
// Remove the chunks we have processed.
|
2017-01-12 11:19:34 +00:00
|
|
|
mSourceSegment.Clear();
|
2014-01-15 06:21:14 +00:00
|
|
|
|
|
|
|
// End of stream, pull the rest frames in encoder.
|
2017-05-24 16:51:47 +00:00
|
|
|
if (mEndOfStream) {
|
2017-01-18 14:01:47 +00:00
|
|
|
VP8LOG(LogLevel::Debug, "mEndOfStream is true");
|
2014-01-15 06:21:14 +00:00
|
|
|
mEncodingComplete = true;
|
2016-02-26 08:07:29 +00:00
|
|
|
// Bug 1243611, keep calling vpx_codec_encode and vpx_codec_get_cx_data
|
|
|
|
// until vpx_codec_get_cx_data return null.
|
|
|
|
do {
|
|
|
|
if (vpx_codec_encode(mVPXContext, nullptr, mEncodedTimestamp,
|
2017-01-12 11:19:34 +00:00
|
|
|
0, 0, VPX_DL_REALTIME)) {
|
2016-02-26 08:07:29 +00:00
|
|
|
return NS_ERROR_FAILURE;
|
|
|
|
}
|
2017-01-24 15:19:00 +00:00
|
|
|
} while(NS_SUCCEEDED(GetEncodedPartitions(aData)));
|
2014-01-15 06:21:14 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
return NS_OK ;
|
|
|
|
}
|
|
|
|
|
|
|
|
} // namespace mozilla
|