Merge b2g-inbound to m-c.

Ryan VanderMeulen 2014-01-19 15:40:48 -05:00
commit fdae3860b4
40 changed files with 1695 additions and 24 deletions

View File

@ -12,7 +12,7 @@
<copyfile dest="Makefile" src="core/root.mk"/>
</project>
<project name="fake-dalvik" path="dalvik" remote="b2g" revision="ca1f327d5acc198bb4be62fa51db2c039032c9ce"/>
<project name="gaia.git" path="gaia" remote="mozillaorg" revision="73a7e0c15969a058964e92fad1925efead38dcfc"/>
<project name="gaia.git" path="gaia" remote="mozillaorg" revision="cf19095a08e1279667b3af0df920c0b0107c8cad"/>
<project name="gonk-misc" path="gonk-misc" remote="b2g" revision="e9b6626eddbc85873eaa2a9174a9bd5101e5c05f"/>
<project name="rilproxy" path="rilproxy" remote="b2g" revision="827214fcf38d6569aeb5c6d6f31cb296d1f09272"/>
<project name="platform_hardware_ril" path="hardware/ril" remote="b2g" revision="eda08beb3ba9a159843c70ffde0f9660ec351eb9"/>

View File

@ -11,7 +11,7 @@
</project>
<project name="rilproxy" path="rilproxy" remote="b2g" revision="827214fcf38d6569aeb5c6d6f31cb296d1f09272"/>
<project name="fake-libdvm" path="dalvik" remote="b2g" revision="d50ae982b19f42f0b66d08b9eb306be81687869f"/>
<project name="gaia" path="gaia" remote="mozillaorg" revision="73a7e0c15969a058964e92fad1925efead38dcfc"/>
<project name="gaia" path="gaia" remote="mozillaorg" revision="cf19095a08e1279667b3af0df920c0b0107c8cad"/>
<project name="gonk-misc" path="gonk-misc" remote="b2g" revision="e9b6626eddbc85873eaa2a9174a9bd5101e5c05f"/>
<project name="moztt" path="external/moztt" remote="b2g" revision="96d2d00165f4561fbde62d1062706eab74b3a01f"/>
<project name="apitrace" path="external/apitrace" remote="apitrace" revision="221bcaecbbbc9d185f691471b64aed9e75b0c11d"/>

View File

@ -12,7 +12,7 @@
<copyfile dest="Makefile" src="core/root.mk"/>
</project>
<project name="fake-dalvik" path="dalvik" remote="b2g" revision="ca1f327d5acc198bb4be62fa51db2c039032c9ce"/>
<project name="gaia.git" path="gaia" remote="mozillaorg" revision="73a7e0c15969a058964e92fad1925efead38dcfc"/>
<project name="gaia.git" path="gaia" remote="mozillaorg" revision="cf19095a08e1279667b3af0df920c0b0107c8cad"/>
<project name="gonk-misc" path="gonk-misc" remote="b2g" revision="e9b6626eddbc85873eaa2a9174a9bd5101e5c05f"/>
<project name="rilproxy" path="rilproxy" remote="b2g" revision="827214fcf38d6569aeb5c6d6f31cb296d1f09272"/>
<project name="platform_hardware_ril" path="hardware/ril" remote="b2g" revision="eda08beb3ba9a159843c70ffde0f9660ec351eb9"/>

View File

@ -1,4 +1,4 @@
{
"revision": "f1421b9d57e81c3823a32eb02e6ab6e3c74b12f1",
"revision": "34bb6cda673afe1b610c2be5611889c38d180b53",
"repo_path": "/integration/gaia-central"
}

View File

@ -11,7 +11,7 @@
<copyfile dest="Makefile" src="core/root.mk"/>
</project>
<project name="fake-dalvik" path="dalvik" remote="b2g" revision="ca1f327d5acc198bb4be62fa51db2c039032c9ce"/>
<project name="gaia.git" path="gaia" remote="mozillaorg" revision="73a7e0c15969a058964e92fad1925efead38dcfc"/>
<project name="gaia.git" path="gaia" remote="mozillaorg" revision="cf19095a08e1279667b3af0df920c0b0107c8cad"/>
<project name="gonk-misc" path="gonk-misc" remote="b2g" revision="e9b6626eddbc85873eaa2a9174a9bd5101e5c05f"/>
<project name="rilproxy" path="rilproxy" remote="b2g" revision="827214fcf38d6569aeb5c6d6f31cb296d1f09272"/>
<project name="librecovery" path="librecovery" remote="b2g" revision="84f2f2fce22605e17d511ff1767e54770067b5b5"/>
@ -95,7 +95,7 @@
<project name="device/qcom/common" path="device/qcom/common" revision="d13aaf080177b7c48f243d51827db5c7a7873cd0"/>
<project name="platform/vendor/qcom/msm7627a" path="device/qcom/msm7627a" revision="f06bcacc6f13cec895dc5d4c2385c076396194ec"/>
<project name="android-device-hamachi" path="device/qcom/hamachi" remote="b2g" revision="9071ac8f0830979fe4a96ce47c7443d8adf0929d"/>
<project name="kernel/msm" path="kernel" revision="1f91c1843268af615bf00d7945948653829ac88a"/>
<project name="kernel/msm" path="kernel" revision="8072055e7094023e2cac8eea425bb785fe1d4066"/>
<project name="platform/hardware/qcom/camera" path="hardware/qcom/camera" revision="19933e5d182a4799c6217b19a18562193a419298"/>
<project name="platform/hardware/qcom/display" path="hardware/qcom/display" revision="5a58382180c70d0c446badc9c9837918ab69ec60"/>
<project name="platform/hardware/qcom/media" path="hardware/qcom/media" revision="20d83ab382a1f813702421e76c2f9f994585990e"/>

View File

@ -10,7 +10,7 @@
<copyfile dest="Makefile" src="core/root.mk"/>
</project>
<project name="fake-dalvik" path="dalvik" remote="b2g" revision="ca1f327d5acc198bb4be62fa51db2c039032c9ce"/>
<project name="gaia.git" path="gaia" remote="mozillaorg" revision="73a7e0c15969a058964e92fad1925efead38dcfc"/>
<project name="gaia.git" path="gaia" remote="mozillaorg" revision="cf19095a08e1279667b3af0df920c0b0107c8cad"/>
<project name="gonk-misc" path="gonk-misc" remote="b2g" revision="e9b6626eddbc85873eaa2a9174a9bd5101e5c05f"/>
<project name="rilproxy" path="rilproxy" remote="b2g" revision="827214fcf38d6569aeb5c6d6f31cb296d1f09272"/>
<project name="librecovery" path="librecovery" remote="b2g" revision="84f2f2fce22605e17d511ff1767e54770067b5b5"/>

View File

@ -12,7 +12,7 @@
<copyfile dest="Makefile" src="core/root.mk"/>
</project>
<project name="fake-dalvik" path="dalvik" remote="b2g" revision="ca1f327d5acc198bb4be62fa51db2c039032c9ce"/>
<project name="gaia.git" path="gaia" remote="mozillaorg" revision="73a7e0c15969a058964e92fad1925efead38dcfc"/>
<project name="gaia.git" path="gaia" remote="mozillaorg" revision="cf19095a08e1279667b3af0df920c0b0107c8cad"/>
<project name="gonk-misc" path="gonk-misc" remote="b2g" revision="e9b6626eddbc85873eaa2a9174a9bd5101e5c05f"/>
<project name="rilproxy" path="rilproxy" remote="b2g" revision="827214fcf38d6569aeb5c6d6f31cb296d1f09272"/>
<project name="librecovery" path="librecovery" remote="b2g" revision="84f2f2fce22605e17d511ff1767e54770067b5b5"/>

View File

@ -11,7 +11,7 @@
<copyfile dest="Makefile" src="core/root.mk"/>
</project>
<project name="fake-dalvik" path="dalvik" remote="b2g" revision="ca1f327d5acc198bb4be62fa51db2c039032c9ce"/>
<project name="gaia.git" path="gaia" remote="mozillaorg" revision="73a7e0c15969a058964e92fad1925efead38dcfc"/>
<project name="gaia.git" path="gaia" remote="mozillaorg" revision="cf19095a08e1279667b3af0df920c0b0107c8cad"/>
<project name="gonk-misc" path="gonk-misc" remote="b2g" revision="e9b6626eddbc85873eaa2a9174a9bd5101e5c05f"/>
<project name="rilproxy" path="rilproxy" remote="b2g" revision="827214fcf38d6569aeb5c6d6f31cb296d1f09272"/>
<project name="librecovery" path="librecovery" remote="b2g" revision="84f2f2fce22605e17d511ff1767e54770067b5b5"/>

View File

@ -11,7 +11,7 @@
</project>
<project name="rilproxy" path="rilproxy" remote="b2g" revision="827214fcf38d6569aeb5c6d6f31cb296d1f09272"/>
<project name="fake-libdvm" path="dalvik" remote="b2g" revision="d50ae982b19f42f0b66d08b9eb306be81687869f"/>
<project name="gaia" path="gaia" remote="mozillaorg" revision="73a7e0c15969a058964e92fad1925efead38dcfc"/>
<project name="gaia" path="gaia" remote="mozillaorg" revision="cf19095a08e1279667b3af0df920c0b0107c8cad"/>
<project name="gonk-misc" path="gonk-misc" remote="b2g" revision="e9b6626eddbc85873eaa2a9174a9bd5101e5c05f"/>
<project name="moztt" path="external/moztt" remote="b2g" revision="96d2d00165f4561fbde62d1062706eab74b3a01f"/>
<project name="apitrace" path="external/apitrace" remote="apitrace" revision="221bcaecbbbc9d185f691471b64aed9e75b0c11d"/>

View File

@ -11,7 +11,7 @@
<copyfile dest="Makefile" src="core/root.mk"/>
</project>
<project name="fake-dalvik" path="dalvik" remote="b2g" revision="ca1f327d5acc198bb4be62fa51db2c039032c9ce"/>
<project name="gaia.git" path="gaia" remote="mozillaorg" revision="73a7e0c15969a058964e92fad1925efead38dcfc"/>
<project name="gaia.git" path="gaia" remote="mozillaorg" revision="cf19095a08e1279667b3af0df920c0b0107c8cad"/>
<project name="gonk-misc" path="gonk-misc" remote="b2g" revision="e9b6626eddbc85873eaa2a9174a9bd5101e5c05f"/>
<project name="rilproxy" path="rilproxy" remote="b2g" revision="827214fcf38d6569aeb5c6d6f31cb296d1f09272"/>
<project name="librecovery" path="librecovery" remote="b2g" revision="84f2f2fce22605e17d511ff1767e54770067b5b5"/>

View File

@ -245,6 +245,8 @@ if test -n "$gonkdir" ; then
MOZ_B2G_CAMERA=1
MOZ_OMX_DECODER=1
AC_SUBST(MOZ_OMX_DECODER)
MOZ_OMX_ENCODER=1
AC_SUBST(MOZ_OMX_ENCODER)
;;
19)
GONK_INCLUDES="-I$gonkdir/frameworks/native/include"

View File

@ -0,0 +1,13 @@
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this file,
# You can obtain one at http://mozilla.org/MPL/2.0/.
include $(topsrcdir)/config/rules.mk
# These includes are from Android JB, for use of MediaCodec.
INCLUDES += \
-I$(topsrcdir)/ipc/chromium/src \
-I$(ANDROID_SOURCE)/frameworks/native/opengl/include/ \
-I$(ANDROID_SOURCE)/frameworks/native/include/ \
-I$(ANDROID_SOURCE)/frameworks/av/include/media/ \
$(NULL)

View File

@ -0,0 +1,261 @@
/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*-*/
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this file,
* You can obtain one at http://mozilla.org/MPL/2.0/. */
#include "OmxTrackEncoder.h"
#include "OMXCodecWrapper.h"
#include "VideoUtils.h"
#include "ISOTrackMetadata.h"
#ifdef MOZ_WIDGET_GONK
#include <android/log.h>
#define OMX_LOG(args...) \
do { \
__android_log_print(ANDROID_LOG_INFO, "OmxTrackEncoder", ##args); \
} while (0)
#else
#define OMX_LOG(args, ...)
#endif
using namespace android;
namespace mozilla {
#define ENCODER_CONFIG_FRAME_RATE 30 // fps
#define GET_ENCODED_VIDEO_FRAME_TIMEOUT 100000 // microseconds
nsresult
OmxVideoTrackEncoder::Init(int aWidth, int aHeight, TrackRate aTrackRate)
{
mFrameWidth = aWidth;
mFrameHeight = aHeight;
mTrackRate = aTrackRate;
mEncoder = OMXCodecWrapper::CreateAVCEncoder();
NS_ENSURE_TRUE(mEncoder, NS_ERROR_FAILURE);
nsresult rv = mEncoder->Configure(mFrameWidth, mFrameHeight,
ENCODER_CONFIG_FRAME_RATE);
ReentrantMonitorAutoEnter mon(mReentrantMonitor);
mInitialized = (rv == NS_OK);
mReentrantMonitor.NotifyAll();
return rv;
}
already_AddRefed<TrackMetadataBase>
OmxVideoTrackEncoder::GetMetadata()
{
{
// Wait until mEncoder has been initialized or encoding is canceled.
ReentrantMonitorAutoEnter mon(mReentrantMonitor);
while (!mCanceled && !mInitialized) {
mReentrantMonitor.Wait();
}
}
if (mCanceled || mEncodingComplete) {
return nullptr;
}
nsRefPtr<AVCTrackMetadata> meta = new AVCTrackMetadata();
meta->Width = mFrameWidth;
meta->Height = mFrameHeight;
meta->FrameRate = ENCODER_CONFIG_FRAME_RATE;
meta->VideoFrequency = 90000; // Hz
return meta.forget();
}
nsresult
OmxVideoTrackEncoder::GetEncodedTrack(EncodedFrameContainer& aData)
{
VideoSegment segment;
{
// Move all the samples from mRawSegment to segment. We only hold the
// monitor in this block.
ReentrantMonitorAutoEnter mon(mReentrantMonitor);
// Wait until mEncoder has been initialized or encoding is canceled.
while (!mCanceled && (!mInitialized ||
(mRawSegment.GetDuration() == 0 && !mEndOfStream))) {
mReentrantMonitor.Wait();
}
if (mCanceled || mEncodingComplete) {
return NS_ERROR_FAILURE;
}
segment.AppendFrom(&mRawSegment);
}
// Start queuing raw frames to the input buffers of OMXCodecWrapper.
VideoSegment::ChunkIterator iter(segment);
while (!iter.IsEnded()) {
VideoChunk chunk = *iter;
// Send only the unique video frames to OMXCodecWrapper.
if (mLastFrame != chunk.mFrame) {
uint64_t totalDurationUs = mTotalFrameDuration * USECS_PER_S / mTrackRate;
layers::Image* img = (chunk.IsNull() || chunk.mFrame.GetForceBlack()) ?
nullptr : chunk.mFrame.GetImage();
mEncoder->Encode(img, mFrameWidth, mFrameHeight, totalDurationUs);
}
mLastFrame.TakeFrom(&chunk.mFrame);
mTotalFrameDuration += chunk.GetDuration();
iter.Next();
}
// Send the EOS signal to OMXCodecWrapper.
if (mEndOfStream && iter.IsEnded() && !mEosSetInEncoder) {
mEosSetInEncoder = true;
uint64_t totalDurationUs = mTotalFrameDuration * USECS_PER_S / mTrackRate;
layers::Image* img = (!mLastFrame.GetImage() || mLastFrame.GetForceBlack())
? nullptr : mLastFrame.GetImage();
mEncoder->Encode(img, mFrameWidth, mFrameHeight, totalDurationUs,
OMXCodecWrapper::BUFFER_EOS);
}
// Dequeue an encoded frame from the output buffers of OMXCodecWrapper.
nsTArray<uint8_t> buffer;
int outFlags = 0;
int64_t outTimeStampUs = 0;
mEncoder->GetNextEncodedFrame(&buffer, &outTimeStampUs, &outFlags,
GET_ENCODED_VIDEO_FRAME_TIMEOUT);
if (!buffer.IsEmpty()) {
nsRefPtr<EncodedFrame> videoData = new EncodedFrame();
if (outFlags & OMXCodecWrapper::BUFFER_CODEC_CONFIG) {
videoData->SetFrameType(EncodedFrame::AVC_CSD);
} else {
videoData->SetFrameType((outFlags & OMXCodecWrapper::BUFFER_SYNC_FRAME) ?
EncodedFrame::I_FRAME : EncodedFrame::P_FRAME);
}
videoData->SetFrameData(&buffer);
videoData->SetTimeStamp(outTimeStampUs);
aData.AppendEncodedFrame(videoData);
}
if (outFlags & OMXCodecWrapper::BUFFER_EOS) {
mEncodingComplete = true;
OMX_LOG("Done encoding video.");
}
return NS_OK;
}
nsresult
OmxAudioTrackEncoder::Init(int aChannels, int aSamplingRate)
{
mChannels = aChannels;
mSamplingRate = aSamplingRate;
mEncoder = OMXCodecWrapper::CreateAACEncoder();
NS_ENSURE_TRUE(mEncoder, NS_ERROR_FAILURE);
nsresult rv = mEncoder->Configure(mChannels, mSamplingRate);
ReentrantMonitorAutoEnter mon(mReentrantMonitor);
mInitialized = (rv == NS_OK);
mReentrantMonitor.NotifyAll();
return NS_OK;
}
already_AddRefed<TrackMetadataBase>
OmxAudioTrackEncoder::GetMetadata()
{
{
// Wait until mEncoder has been initialized or encoding is canceled.
ReentrantMonitorAutoEnter mon(mReentrantMonitor);
while (!mCanceled && !mInitialized) {
mReentrantMonitor.Wait();
}
}
if (mCanceled || mEncodingComplete) {
return nullptr;
}
nsRefPtr<AACTrackMetadata> meta = new AACTrackMetadata();
meta->Channels = mChannels;
meta->SampleRate = mSamplingRate;
meta->FrameSize = OMXCodecWrapper::kAACFrameSize;
meta->FrameDuration = OMXCodecWrapper::kAACFrameDuration;
return meta.forget();
}
nsresult
OmxAudioTrackEncoder::AppendEncodedFrames(EncodedFrameContainer& aContainer)
{
nsTArray<uint8_t> frameData;
int outFlags = 0;
int64_t outTimeUs = -1;
nsresult rv = mEncoder->GetNextEncodedFrame(&frameData, &outTimeUs, &outFlags,
3000); // wait up to 3ms
NS_ENSURE_SUCCESS(rv, rv);
if (!frameData.IsEmpty()) {
bool isCSD = false;
if (outFlags & OMXCodecWrapper::BUFFER_CODEC_CONFIG) { // codec specific data
isCSD = true;
} else if (outFlags & OMXCodecWrapper::BUFFER_EOS) { // last frame
mEncodingComplete = true;
} else {
MOZ_ASSERT(frameData.Length() == OMXCodecWrapper::kAACFrameSize);
}
nsRefPtr<EncodedFrame> audiodata = new EncodedFrame();
audiodata->SetFrameType(isCSD ?
EncodedFrame::AAC_CSD : EncodedFrame::AUDIO_FRAME);
audiodata->SetTimeStamp(outTimeUs);
audiodata->SetFrameData(&frameData);
aContainer.AppendEncodedFrame(audiodata);
}
return NS_OK;
}
nsresult
OmxAudioTrackEncoder::GetEncodedTrack(EncodedFrameContainer& aData)
{
AudioSegment segment;
// Move all the samples from mRawSegment to segment. We only hold
// the monitor in this block.
{
ReentrantMonitorAutoEnter mon(mReentrantMonitor);
// Wait until mEncoder has been initialized or encoding is canceled.
while (!mInitialized && !mCanceled) {
mReentrantMonitor.Wait();
}
if (mCanceled || mEncodingComplete) {
return NS_ERROR_FAILURE;
}
segment.AppendFrom(&mRawSegment);
}
if (!mEosSetInEncoder) {
if (mEndOfStream) {
mEosSetInEncoder = true;
}
if (segment.GetDuration() > 0 || mEndOfStream) {
// Notify EOS at least once, even when segment is empty.
nsresult rv = mEncoder->Encode(segment,
mEndOfStream ? OMXCodecWrapper::BUFFER_EOS : 0);
NS_ENSURE_SUCCESS(rv, rv);
}
}
return AppendEncodedFrames(aData);
}
}
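
Both GetMetadata() implementations above block on mReentrantMonitor until Init() sets mInitialized or the track is canceled, and Init() calls NotifyAll() to wake the waiters. The following standalone sketch reproduces that wait/notify handshake with the standard library only; the type and member names are hypothetical stand-ins for the ReentrantMonitor idiom used in this patch, not part of the patch itself.

#include <chrono>
#include <condition_variable>
#include <iostream>
#include <mutex>
#include <thread>

// Hypothetical stand-in for the encoder's init/cancel handshake.
struct InitGate {
  std::mutex mMutex;
  std::condition_variable mCond;
  bool mInitialized = false;
  bool mCanceled = false;

  // Mirrors the "wait until mEncoder is initialized or canceled" loop.
  bool WaitForInit() {
    std::unique_lock<std::mutex> lock(mMutex);
    mCond.wait(lock, [this] { return mInitialized || mCanceled; });
    return mInitialized && !mCanceled;
  }

  // Mirrors Init(): set the flag and wake any waiter (NotifyAll()).
  void FinishInit() {
    std::lock_guard<std::mutex> lock(mMutex);
    mInitialized = true;
    mCond.notify_all();
  }

  // Mirrors cancellation: wake waiters so they can bail out.
  void Cancel() {
    std::lock_guard<std::mutex> lock(mMutex);
    mCanceled = true;
    mCond.notify_all();
  }
};

int main() {
  InitGate gate;
  std::thread initThread([&] {
    // Pretend Configure() takes a while before initialization completes.
    std::this_thread::sleep_for(std::chrono::milliseconds(50));
    gate.FinishInit();
  });
  std::cout << (gate.WaitForInit() ? "metadata available\n" : "canceled\n");
  initThread.join();
  return 0;
}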

View File

@ -0,0 +1,66 @@
/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*-*/
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this file,
* You can obtain one at http://mozilla.org/MPL/2.0/. */
#ifndef OmxTrackEncoder_h_
#define OmxTrackEncoder_h_
#include "TrackEncoder.h"
namespace android {
class OMXVideoEncoder;
class OMXAudioEncoder;
}
/**
* This file defines two major classes, OmxVideoTrackEncoder and
* OmxAudioTrackEncoder: the video and audio track encoders for the AVC/H.264
* and AAC media types. OMXCodecWrapper wraps and controls an instance of
* MediaCodec, defined in libstagefright, which runs on the Android Jelly Bean
* platform.
*/
namespace mozilla {
class OmxVideoTrackEncoder: public VideoTrackEncoder
{
public:
OmxVideoTrackEncoder()
: VideoTrackEncoder()
{}
already_AddRefed<TrackMetadataBase> GetMetadata() MOZ_OVERRIDE;
nsresult GetEncodedTrack(EncodedFrameContainer& aData) MOZ_OVERRIDE;
protected:
nsresult Init(int aWidth, int aHeight, TrackRate aTrackRate) MOZ_OVERRIDE;
private:
nsAutoPtr<android::OMXVideoEncoder> mEncoder;
};
class OmxAudioTrackEncoder MOZ_FINAL : public AudioTrackEncoder
{
public:
OmxAudioTrackEncoder()
: AudioTrackEncoder()
{}
already_AddRefed<TrackMetadataBase> GetMetadata() MOZ_OVERRIDE;
nsresult GetEncodedTrack(EncodedFrameContainer& aData) MOZ_OVERRIDE;
protected:
nsresult Init(int aChannels, int aSamplingRate) MOZ_OVERRIDE;
private:
// Append encoded frames to aContainer.
nsresult AppendEncodedFrames(EncodedFrameContainer& aContainer);
nsAutoPtr<android::OMXAudioEncoder> mEncoder;
};
}
#endif

View File

@ -24,6 +24,13 @@ if CONFIG['MOZ_OPUS']:
EXPORTS += ['OpusTrackEncoder.h']
UNIFIED_SOURCES += ['OpusTrackEncoder.cpp']
if CONFIG['MOZ_OMX_ENCODER']:
DEFINES['MOZ_OMX_ENCODER'] = True
EXPORTS += ['OmxTrackEncoder.h']
UNIFIED_SOURCES += ['OmxTrackEncoder.cpp']
FAIL_ON_WARNINGS = True
FINAL_LIBRARY = 'gklayout'
include('/ipc/chromium/chromium-config.mozbuild')

View File

@ -13,3 +13,10 @@ CXXFLAGS += \
-I$(ANDROID_SOURCE)/frameworks/base/include/media/stagefright/openmax \
-I$(ANDROID_SOURCE)/frameworks/base/media/libstagefright/include/ \
$(NULL)
# These includes are for Android JB, using MediaCodec on OMXCodec.
INCLUDES += \
-I$(ANDROID_SOURCE)/frameworks/native/opengl/include/ \
-I$(ANDROID_SOURCE)/frameworks/native/include/ \
-I$(ANDROID_SOURCE)/frameworks/av/include/media/ \
$(NULL)

View File

@ -0,0 +1,273 @@
/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*-*/
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this file,
* You can obtain one at http://mozilla.org/MPL/2.0/. */
#include "OMXCodecDescriptorUtil.h"
namespace android {
// NAL unit start code.
static const uint8_t kNALUnitStartCode[] = { 0x00, 0x00, 0x00, 0x01 };
// This class is used to generate an AVC/H.264 decoder config descriptor blob
// from the sequence parameter set (SPS) + picture parameter set (PPS) data.
//
// SPS + PPS format:
// --- SPS NAL unit ---
// Start code <0x00 0x00 0x00 0x01> (4 bytes)
// NAL unit type <0x07> (5 bits)
// SPS (1+ bytes)
// ...
// --- PPS NAL unit ---
// Start code <0x00 0x00 0x00 0x01> (4 bytes)
// NAL unit type <0x08> (5 bits)
// PPS (1+ bytes)
// ...
// --- End ---
//
// Descriptor format:
// --- Header (5 bytes) ---
// Version <0x01> (1 byte)
// Profile (1 byte)
// Compatible profiles (1 byte)
// Level (1 byte)
// Reserved <111111> (6 bits)
// NAL length type (2 bits)
// --- Parameter sets ---
// Reserved <111> (3 bits)
// Number of SPS (5 bits)
// SPS (3+ bytes)
// Length (2 bytes)
// SPS NAL unit (1+ bytes)
// ...
// Number of PPS (1 byte)
// PPS (3+ bytes)
// Length (2 bytes)
// PPS NAL unit (1+ bytes)
// ...
// --- End ---
class AVCDecodeConfigDescMaker {
public:
// Convert SPS + PPS data to decoder config descriptor blob. aParamSets
// contains the source data, and the generated blob will be appended to
// aOutputBuf.
status_t ConvertParamSetsToDescriptorBlob(ABuffer* aParamSets,
nsTArray<uint8_t>* aOutputBuf)
{
uint8_t header[] = {
0x01, // Version.
0x00, // Will be filled with 'profile' when parsing SPS later.
0x00, // Will be filled with 'compatible profiles' when parsing SPS later.
0x00, // Will be filled with 'level' when parsing SPS later.
0xFF, // 6 bits reserved value <111111> + 2 bits NAL length type <11>
};
size_t paramSetsSize = ParseParamSets(aParamSets, header);
NS_ENSURE_TRUE(paramSetsSize > 0, ERROR_MALFORMED);
// Extra 1 byte for number of SPS & the other for number of PPS.
aOutputBuf->SetCapacity(sizeof(header) + paramSetsSize + 2);
// 5 bytes Header.
aOutputBuf->AppendElements(header, sizeof(header));
// 3 bits <111> + 5 bits number of SPS.
uint8_t n = mSPS.Length();
aOutputBuf->AppendElement(0xE0 | n);
// SPS NAL units.
for (int i = 0; i < n; i++) {
mSPS.ElementAt(i).AppendTo(aOutputBuf);
}
// 1 byte number of PPS.
n = mPPS.Length();
aOutputBuf->AppendElement(n);
// PPS NAL units.
for (int i = 0; i < n; i++) {
mPPS.ElementAt(i).AppendTo(aOutputBuf);
}
return OK;
}
private:
// Sequence parameter set or picture parameter set.
struct AVCParamSet {
AVCParamSet(const uint8_t* aPtr, const size_t aSize)
: mPtr(aPtr)
, mSize(aSize)
{}
// Append 2 bytes length value and NAL unit bitstream to aOutputBuf.
void AppendTo(nsTArray<uint8_t>* aOutputBuf)
{
MOZ_ASSERT(mPtr && mSize > 0);
// 2 bytes length value.
uint8_t size[] = {
(mSize & 0xFF00) >> 8, // MSB.
mSize & 0x00FF, // LSB.
};
aOutputBuf->AppendElements(size, sizeof(size));
aOutputBuf->AppendElements(mPtr, mSize);
}
const uint8_t* mPtr; // Pointer to NAL unit.
const size_t mSize; // NAL unit length in bytes.
};
// NAL unit types.
enum {
kNALUnitTypeSPS = 0x07, // Value for sequence parameter set.
kNALUnitTypePPS = 0x08, // Value for picture parameter set.
};
// Search for the next start code to determine the extent of the parameter set
// data and save the result to the corresponding parameter set array. The
// search range is from aPtr to (aPtr + aSize - 4), and aType indicates which
// array the result is saved to.
// The size (in bytes) of the found parameter set is stored in aParamSetSize.
// This function also returns a pointer to the found start code, which the
// caller can use for the next iteration of the search. If the returned pointer
// is beyond the end of the search range, no start code was found.
uint8_t* ParseParamSet(uint8_t* aPtr, size_t aSize, uint8_t aType,
size_t* aParamSetSize)
{
MOZ_ASSERT(aPtr && aSize > 0);
MOZ_ASSERT(aType == kNALUnitTypeSPS || aType == kNALUnitTypePPS);
MOZ_ASSERT(aParamSetSize);
// Find next start code.
size_t index = 0;
size_t end = aSize - sizeof(kNALUnitStartCode);
uint8_t* nextStartCode = aPtr;
while (index <= end &&
memcmp(kNALUnitStartCode, aPtr + index, sizeof(kNALUnitStartCode))) {
++index;
}
if (index <= end) {
// Found.
nextStartCode = aPtr + index;
} else {
nextStartCode = aPtr + aSize;
}
*aParamSetSize = nextStartCode - aPtr;
NS_ENSURE_TRUE(*aParamSetSize > 0, nullptr);
AVCParamSet paramSet(aPtr, *aParamSetSize);
if (aType == kNALUnitTypeSPS) {
// SPS should have at least 4 bytes.
NS_ENSURE_TRUE(*aParamSetSize >= 4, nullptr);
mSPS.AppendElement(paramSet);
} else {
mPPS.AppendElement(paramSet);
}
return nextStartCode;
}
// Walk through the SPS + PPS data and save the pointer & size of each
// parameter set to the corresponding array. It also fills several values in
// aHeader.
// Returns the total size of all parameter sets, or 0 if parsing fails.
size_t ParseParamSets(ABuffer* aParamSets, uint8_t* aHeader)
{
// Data starts with a start code.
// SPS and PPS are separated with start codes.
// Also, SPS must come before PPS
uint8_t type = kNALUnitTypeSPS;
bool hasSPS = false;
bool hasPPS = false;
uint8_t* ptr = aParamSets->data();
uint8_t* nextStartCode = ptr;
size_t remain = aParamSets->size();
size_t paramSetSize = 0;
size_t totalSize = 0;
// Examine each NAL unit, delimited by start codes.
while (remain > sizeof(kNALUnitStartCode) &&
!memcmp(kNALUnitStartCode, ptr, sizeof(kNALUnitStartCode))) {
ptr += sizeof(kNALUnitStartCode);
remain -= sizeof(kNALUnitStartCode);
// NAL unit format is defined in ISO/IEC 14496-10 7.3.1:
// --- NAL unit ---
// Reserved <111> (3 bits)
// Type (5 bits)
// Parameter set (4+ bytes for SPS, 1+ bytes for PPS)
// --- End ---
type = (ptr[0] & 0x1F);
if (type == kNALUnitTypeSPS) {
// SPS must come before PPS.
NS_ENSURE_FALSE(hasPPS, 0);
if (!hasSPS) {
// SPS contains some header values.
aHeader[1] = ptr[1]; // Profile.
aHeader[2] = ptr[2]; // Compatible Profiles.
aHeader[3] = ptr[3]; // Level.
hasSPS = true;
}
nextStartCode = ParseParamSet(ptr, remain, type, &paramSetSize);
} else if (type == kNALUnitTypePPS) {
// SPS must come before PPS.
NS_ENSURE_TRUE(hasSPS, 0);
if (!hasPPS) {
hasPPS = true;
}
nextStartCode = ParseParamSet(ptr, remain, type, &paramSetSize);
} else {
// Should never contain NAL unit other than SPS or PPS.
NS_ENSURE_TRUE(false, 0);
}
NS_ENSURE_TRUE(nextStartCode, 0);
// Move to next start code.
remain -= (nextStartCode - ptr);
ptr = nextStartCode;
totalSize += (2 + paramSetSize); // 2 bytes length + NAL unit.
}
// Sanity check on the number of parameter sets.
size_t n = mSPS.Length();
NS_ENSURE_TRUE(n > 0 && n <= 0x1F, 0); // 5 bits length only.
n = mPPS.Length();
NS_ENSURE_TRUE(n > 0 && n <= 0xFF, 0); // 1 byte length only.
return totalSize;
}
nsTArray<AVCParamSet> mSPS;
nsTArray<AVCParamSet> mPPS;
};
// The blob from the OMX encoder could already be in descriptor format, or it
// could be sequence parameter set (SPS) + picture parameter set (PPS) data. If
// the latter, it needs to be parsed and converted into descriptor format.
// See MPEG4Writer::Track::makeAVCCodecSpecificData() and
// MPEG4Writer::Track::writeAvccBox() implementation in libstagefright.
status_t
GenerateAVCDescriptorBlob(ABuffer* aData, nsTArray<uint8_t>* aOutputBuf)
{
const size_t csdSize = aData->size();
const uint8_t* csd = aData->data();
MOZ_ASSERT(csdSize > sizeof(kNALUnitStartCode),
"Size of codec specific data is too short. "
"There could be a serious problem in MediaCodec.");
NS_ENSURE_TRUE(csdSize > sizeof(kNALUnitStartCode), ERROR_MALFORMED);
if (memcmp(csd, kNALUnitStartCode, sizeof(kNALUnitStartCode))) {
// Already in descriptor format. It should have at least 13 bytes.
NS_ENSURE_TRUE(csdSize >= 13, ERROR_MALFORMED);
aOutputBuf->AppendElements(aData->data(), csdSize);
} else {
// In SPS + PPS format. Generate the descriptor blob from the parameter sets.
AVCDecodeConfigDescMaker maker;
status_t result = maker.ConvertParamSetsToDescriptorBlob(aData, aOutputBuf);
NS_ENSURE_TRUE(result == OK, result);
}
return OK;
}
} // namespace android
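
The comment block above documents the descriptor layout that ConvertParamSetsToDescriptorBlob() emits: a 5-byte header, a reserved-bits/count byte per parameter-set group, and each NAL unit prefixed with a 2-byte big-endian length. As a self-contained illustration, the sketch below assembles that same layout from hypothetical SPS/PPS byte arrays (the payload values are made up for the example); it only mirrors the structure, not the patch's parsing code.

#include <cstdint>
#include <cstdio>
#include <vector>

// Append one parameter set as <2-byte big-endian length><NAL unit bytes>,
// mirroring AVCParamSet::AppendTo() above.
static void AppendParamSet(std::vector<uint8_t>& aOut, const std::vector<uint8_t>& aNal)
{
  aOut.push_back((aNal.size() & 0xFF00) >> 8); // Length MSB.
  aOut.push_back(aNal.size() & 0x00FF);        // Length LSB.
  aOut.insert(aOut.end(), aNal.begin(), aNal.end());
}

int main()
{
  // Hypothetical SPS/PPS NAL units (start codes already stripped).
  std::vector<uint8_t> sps = { 0x67, 0x42, 0x80, 0x1E, 0xDA, 0x01, 0x40, 0x16, 0xEC, 0x04 };
  std::vector<uint8_t> pps = { 0x68, 0xCE, 0x06, 0xE2 };

  std::vector<uint8_t> blob = {
    0x01,   // Version.
    sps[1], // Profile, copied from the SPS.
    sps[2], // Compatible profiles.
    sps[3], // Level.
    0xFF,   // 6 reserved bits <111111> + 2 bits NAL length type <11>.
  };
  blob.push_back(0xE0 | 1); // 3 reserved bits <111> + number of SPS.
  AppendParamSet(blob, sps);
  blob.push_back(1);        // Number of PPS (1 byte).
  AppendParamSet(blob, pps);

  for (uint8_t b : blob) {
    printf("%02X ", b);
  }
  printf("\n");
  return 0;
}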

View File

@ -0,0 +1,23 @@
/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*-*/
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this file,
* You can obtain one at http://mozilla.org/MPL/2.0/. */
#ifndef OMXCodecDescriptorUtil_h_
#define OMXCodecDescriptorUtil_h_
#include <stagefright/foundation/ABuffer.h>
#include <stagefright/MediaErrors.h>
#include <nsTArray.h>
namespace android {
// Generate decoder config descriptor (defined in ISO/IEC 14496-15 5.2.4.1.1)
// for AVC/H.264 using codec config blob from encoder.
status_t GenerateAVCDescriptorBlob(ABuffer* aData,
nsTArray<uint8_t>* aOutputBuf);
}
#endif // OMXCodecDescriptorUtil_h_

View File

@ -0,0 +1,609 @@
/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*-*/
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this file,
* You can obtain one at http://mozilla.org/MPL/2.0/. */
#include "OMXCodecWrapper.h"
#include "OMXCodecDescriptorUtil.h"
#include "TrackEncoder.h"
#include <binder/ProcessState.h>
#include <media/ICrypto.h>
#include <media/IOMX.h>
#include <OMX_Component.h>
#include <stagefright/MediaDefs.h>
#include <stagefright/MediaErrors.h>
#include "AudioChannelFormat.h"
#include <mozilla/Monitor.h>
using namespace mozilla;
using namespace mozilla::gfx;
using namespace mozilla::layers;
#define ENCODER_CONFIG_BITRATE 2000000 // bps
// How many seconds between I-frames.
#define ENCODER_CONFIG_I_FRAME_INTERVAL 1
// Wait up to 5ms for input buffers.
#define INPUT_BUFFER_TIMEOUT_US (5 * 1000ll)
#define CODEC_ERROR(args...) \
do { \
__android_log_print(ANDROID_LOG_ERROR, "OMXCodecWrapper", ##args); \
} while (0)
namespace android {
OMXAudioEncoder*
OMXCodecWrapper::CreateAACEncoder()
{
nsAutoPtr<OMXAudioEncoder> aac(new OMXAudioEncoder(CodecType::AAC_ENC));
// Return the object only when media codec is valid.
NS_ENSURE_TRUE(aac->IsValid(), nullptr);
return aac.forget();
}
OMXVideoEncoder*
OMXCodecWrapper::CreateAVCEncoder()
{
nsAutoPtr<OMXVideoEncoder> avc(new OMXVideoEncoder(CodecType::AVC_ENC));
// Return the object only when media codec is valid.
NS_ENSURE_TRUE(avc->IsValid(), nullptr);
return avc.forget();
}
OMXCodecWrapper::OMXCodecWrapper(CodecType aCodecType)
: mStarted(false)
{
ProcessState::self()->startThreadPool();
mLooper = new ALooper();
mLooper->start();
if (aCodecType == CodecType::AVC_ENC) {
mCodec = MediaCodec::CreateByType(mLooper, MEDIA_MIMETYPE_VIDEO_AVC, true);
} else if (aCodecType == CodecType::AAC_ENC) {
mCodec = MediaCodec::CreateByType(mLooper, MEDIA_MIMETYPE_AUDIO_AAC, true);
} else {
NS_ERROR("Unknown codec type.");
}
}
OMXCodecWrapper::~OMXCodecWrapper()
{
if (mCodec.get()) {
Stop();
mCodec->release();
}
mLooper->stop();
}
status_t
OMXCodecWrapper::Start()
{
// Already started.
NS_ENSURE_FALSE(mStarted, OK);
status_t result = mCodec->start();
mStarted = (result == OK);
// Get references to MediaCodec buffers.
if (result == OK) {
mCodec->getInputBuffers(&mInputBufs);
mCodec->getOutputBuffers(&mOutputBufs);
}
return result;
}
status_t
OMXCodecWrapper::Stop()
{
// Already stopped.
NS_ENSURE_TRUE(mStarted, OK);
status_t result = mCodec->stop();
mStarted = !(result == OK);
return result;
}
nsresult
OMXVideoEncoder::Configure(int aWidth, int aHeight, int aFrameRate)
{
MOZ_ASSERT(!mStarted, "Configure() was called already.");
NS_ENSURE_TRUE(aWidth > 0 && aHeight > 0 && aFrameRate > 0,
NS_ERROR_INVALID_ARG);
// Set up configuration parameters for AVC/H.264 encoder.
sp<AMessage> format = new AMessage;
// Fixed values
format->setString("mime", MEDIA_MIMETYPE_VIDEO_AVC);
format->setInt32("bitrate", ENCODER_CONFIG_BITRATE);
format->setInt32("i-frame-interval", ENCODER_CONFIG_I_FRAME_INTERVAL);
// See mozilla::layers::GrallocImage; it supports YUV 4:2:0, where CbCr width
// and height are half those of Y.
format->setInt32("color-format", OMX_COLOR_FormatYUV420SemiPlanar);
format->setInt32("profile", OMX_VIDEO_AVCProfileBaseline);
format->setInt32("level", OMX_VIDEO_AVCLevel3);
format->setInt32("bitrate-mode", OMX_Video_ControlRateConstant);
format->setInt32("store-metadata-in-buffers", 0);
format->setInt32("prepend-sps-pps-to-idr-frames", 0);
// Input values.
format->setInt32("width", aWidth);
format->setInt32("height", aHeight);
format->setInt32("stride", aWidth);
format->setInt32("slice-height", aHeight);
format->setInt32("frame-rate", aFrameRate);
status_t result = mCodec->configure(format, nullptr, nullptr,
MediaCodec::CONFIGURE_FLAG_ENCODE);
NS_ENSURE_TRUE(result == OK, NS_ERROR_FAILURE);
mWidth = aWidth;
mHeight = aHeight;
result = Start();
return result == OK ? NS_OK : NS_ERROR_FAILURE;
}
// Copy pixels from planar YUV (4:4:4/4:2:2/4:2:0) or NV21 (semi-planar 4:2:0)
// format to NV12 (semi-planar 4:2:0) format for QCOM HW encoder.
// Planar YUV: YYY...UUU...VVV...
// NV21: YYY...VUVU...
// NV12: YYY...UVUV...
// For 4:4:4/4:2:2 -> 4:2:0, subsample using odd row/column without
// interpolation.
// aSource contains info about source image data, and the result will be stored
// in aDestination, whose size needs to be >= Y plane size * 3 / 2.
static
void
ConvertPlanarYCbCrToNV12(const PlanarYCbCrData* aSource, uint8_t* aDestination)
{
// Fill Y plane.
uint8_t* y = aSource->mYChannel;
IntSize ySize = aSource->mYSize;
// Y plane.
for (int i = 0; i < ySize.height; i++) {
memcpy(aDestination, y, ySize.width);
aDestination += ySize.width;
y += aSource->mYStride;
}
// Fill interleaved UV plane.
uint8_t* u = aSource->mCbChannel;
uint8_t* v = aSource->mCrChannel;
IntSize uvSize = aSource->mCbCrSize;
// Subsample to 4:2:0 if source is 4:4:4 or 4:2:2.
// Y plane width & height should be multiples of U/V plane width & height.
MOZ_ASSERT(ySize.width % uvSize.width == 0 &&
ySize.height % uvSize.height == 0);
size_t uvWidth = ySize.width / 2;
size_t uvHeight = ySize.height / 2;
size_t horiSubsample = uvSize.width / uvWidth;
size_t uPixStride = horiSubsample * (1 + aSource->mCbSkip);
size_t vPixStride = horiSubsample * (1 + aSource->mCrSkip);
size_t lineStride = uvSize.height / uvHeight * aSource->mCbCrStride;
for (int i = 0; i < uvHeight; i++) {
// 1st pixel per line.
uint8_t* uSrc = u;
uint8_t* vSrc = v;
for (int j = 0; j < uvWidth; j++) {
*aDestination++ = *uSrc;
*aDestination++ = *vSrc;
// Pick next source pixel.
uSrc += uPixStride;
vSrc += vPixStride;
}
// Pick next source line.
u += lineStride;
v += lineStride;
}
}
nsresult
OMXVideoEncoder::Encode(const Image* aImage, int aWidth, int aHeight,
int64_t aTimestamp, int aInputFlags)
{
MOZ_ASSERT(mStarted, "Configure() should be called before Encode().");
NS_ENSURE_TRUE(aWidth == mWidth && aHeight == mHeight && aTimestamp >= 0,
NS_ERROR_INVALID_ARG);
status_t result;
// Dequeue an input buffer.
uint32_t index;
result = mCodec->dequeueInputBuffer(&index, INPUT_BUFFER_TIMEOUT_US);
NS_ENSURE_TRUE(result == OK, NS_ERROR_FAILURE);
const sp<ABuffer>& inBuf = mInputBufs.itemAt(index);
uint8_t* dst = inBuf->data();
size_t dstSize = inBuf->capacity();
size_t yLen = aWidth * aHeight;
size_t uvLen = yLen / 2;
// Buffer should be large enough to hold input image data.
MOZ_ASSERT(dstSize >= yLen + uvLen);
inBuf->setRange(0, yLen + uvLen);
if (!aImage) {
// Generate muted/black image directly in buffer.
dstSize = yLen + uvLen;
// Fill Y plane.
memset(dst, 0x10, yLen);
// Fill UV plane.
memset(dst + yLen, 0x80, uvLen);
} else {
Image* img = const_cast<Image*>(aImage);
ImageFormat format = img->GetFormat();
MOZ_ASSERT(aWidth == img->GetSize().width &&
aHeight == img->GetSize().height);
if (format == GRALLOC_PLANAR_YCBCR) {
// Get graphic buffer pointer.
void* imgPtr = nullptr;
GrallocImage* nativeImage = static_cast<GrallocImage*>(img);
SurfaceDescriptor handle = nativeImage->GetSurfaceDescriptor();
SurfaceDescriptorGralloc gralloc = handle.get_SurfaceDescriptorGralloc();
sp<GraphicBuffer> graphicBuffer = GrallocBufferActor::GetFrom(gralloc);
graphicBuffer->lock(GraphicBuffer::USAGE_SW_READ_MASK, &imgPtr);
uint8_t* src = static_cast<uint8_t*>(imgPtr);
// Only support NV21 for now.
MOZ_ASSERT(graphicBuffer->getPixelFormat() ==
HAL_PIXEL_FORMAT_YCrCb_420_SP);
// Build PlanarYCbCrData for NV21 buffer.
PlanarYCbCrData nv21;
// Y plane.
nv21.mYChannel = src;
nv21.mYSize.width = aWidth;
nv21.mYSize.height = aHeight;
nv21.mYStride = aWidth;
nv21.mYSkip = 0;
// Interleaved VU plane.
nv21.mCrChannel = src + yLen;
nv21.mCrSkip = 1;
nv21.mCbChannel = nv21.mCrChannel + 1;
nv21.mCbSkip = 1;
nv21.mCbCrStride = aWidth;
// 4:2:0.
nv21.mCbCrSize.width = aWidth / 2;
nv21.mCbCrSize.height = aHeight / 2;
ConvertPlanarYCbCrToNV12(&nv21, dst);
graphicBuffer->unlock();
} else if (format == PLANAR_YCBCR) {
ConvertPlanarYCbCrToNV12(static_cast<PlanarYCbCrImage*>(img)->GetData(),
dst);
} else {
// TODO: support RGB to YUV color conversion.
NS_ERROR("Unsupported input image type.");
}
}
// Queue this input buffer.
result = mCodec->queueInputBuffer(index, 0, dstSize, aTimestamp, aInputFlags);
return result == OK ? NS_OK : NS_ERROR_FAILURE;
}
status_t
OMXVideoEncoder::AppendDecoderConfig(nsTArray<uint8_t>* aOutputBuf,
ABuffer* aData)
{
// AVC/H.264 decoder config descriptor is needed to construct MP4 'avcC' box
// (defined in ISO/IEC 14496-15 5.2.4.1.1).
return GenerateAVCDescriptorBlob(aData, aOutputBuf);
}
// Override to replace the NAL unit start code with a 4-byte unit length.
// See ISO/IEC 14496-15 5.2.3.
void OMXVideoEncoder::AppendFrame(nsTArray<uint8_t>* aOutputBuf,
const uint8_t* aData, size_t aSize)
{
uint8_t length[] = {
(aSize >> 24) & 0xFF,
(aSize >> 16) & 0xFF,
(aSize >> 8) & 0xFF,
aSize & 0xFF,
};
aOutputBuf->SetCapacity(aSize);
aOutputBuf->AppendElements(length, sizeof(length));
aOutputBuf->AppendElements(aData + sizeof(length), aSize);
}
nsresult
OMXAudioEncoder::Configure(int aChannels, int aSamplingRate)
{
MOZ_ASSERT(!mStarted);
NS_ENSURE_TRUE(aChannels > 0 && aSamplingRate > 0, NS_ERROR_INVALID_ARG);
// Set up configuration parameters for AAC encoder.
sp<AMessage> format = new AMessage;
// Fixed values.
format->setString("mime", MEDIA_MIMETYPE_AUDIO_AAC);
format->setInt32("bitrate", kAACBitrate);
format->setInt32("aac-profile", OMX_AUDIO_AACObjectLC);
// Input values.
format->setInt32("channel-count", aChannels);
format->setInt32("sample-rate", aSamplingRate);
status_t result = mCodec->configure(format, nullptr, nullptr,
MediaCodec::CONFIGURE_FLAG_ENCODE);
NS_ENSURE_TRUE(result == OK, NS_ERROR_FAILURE);
mChannels = aChannels;
mSampleDuration = 1000000 / aSamplingRate;
result = Start();
return result == OK ? NS_OK : NS_ERROR_FAILURE;
}
class InputBufferHelper MOZ_FINAL {
public:
InputBufferHelper(sp<MediaCodec>& aCodec, Vector<sp<ABuffer> >& aBuffers)
: mCodec(aCodec)
, mBuffers(aBuffers)
, mIndex(0)
, mData(nullptr)
, mOffset(0)
, mCapicity(0)
{}
~InputBufferHelper()
{
// Unflushed data in buffer.
MOZ_ASSERT(!mData);
}
status_t Dequeue()
{
// Shouldn't have dequeued buffer.
MOZ_ASSERT(!mData);
status_t result = mCodec->dequeueInputBuffer(&mIndex,
INPUT_BUFFER_TIMEOUT_US);
NS_ENSURE_TRUE(result == OK, result);
sp<ABuffer> inBuf = mBuffers.itemAt(mIndex);
mData = inBuf->data();
mCapicity = inBuf->capacity();
mOffset = 0;
return OK;
}
uint8_t* GetPointer() { return mData + mOffset; }
const size_t AvailableSize() { return mCapicity - mOffset; }
void IncreaseOffset(size_t aValue)
{
// Should never go out of bounds.
MOZ_ASSERT(mOffset + aValue <= mCapicity);
mOffset += aValue;
}
status_t Enqueue(int64_t aTimestamp, int aFlags)
{
// Should have dequeued buffer.
MOZ_ASSERT(mData);
// Queue this buffer.
status_t result = mCodec->queueInputBuffer(mIndex, 0, mOffset, aTimestamp,
aFlags);
NS_ENSURE_TRUE(result == OK, result);
mData = nullptr;
return OK;
}
private:
sp<MediaCodec>& mCodec;
Vector<sp<ABuffer> >& mBuffers;
size_t mIndex;
uint8_t* mData;
size_t mCapicity;
size_t mOffset;
};
nsresult
OMXAudioEncoder::Encode(const AudioSegment& aSegment, int aInputFlags)
{
#ifndef MOZ_SAMPLE_TYPE_S16
#error MediaCodec accepts only 16-bit PCM data.
#endif
MOZ_ASSERT(mStarted, "Configure() should be called before Encode().");
size_t numSamples = aSegment.GetDuration();
// Get input buffer.
InputBufferHelper buffer(mCodec, mInputBufs);
status_t result = buffer.Dequeue();
NS_ENSURE_TRUE(result == OK, NS_ERROR_FAILURE);
size_t samplesCopied = 0; // Number of copied samples.
if (numSamples > 0) {
// Copy input PCM data to input buffer until queue is empty.
AudioSegment::ChunkIterator iter(const_cast<AudioSegment&>(aSegment));
while (!iter.IsEnded()) {
AudioChunk chunk = *iter;
size_t samplesToCopy = chunk.GetDuration(); // Number of samples to copy.
size_t bytesToCopy = samplesToCopy * mChannels * sizeof(AudioDataValue);
if (bytesToCopy > buffer.AvailableSize()) {
// Not enough space left in input buffer. Send it to encoder and get a
// new one.
// Don't signal EOS since there is more data to copy.
result = buffer.Enqueue(mTimestamp, aInputFlags & ~BUFFER_EOS);
NS_ENSURE_TRUE(result == OK, NS_ERROR_FAILURE);
mTimestamp += samplesCopied * mSampleDuration;
samplesCopied = 0;
result = buffer.Dequeue();
NS_ENSURE_TRUE(result == OK, NS_ERROR_FAILURE);
}
AudioDataValue* dst = reinterpret_cast<AudioDataValue*>(buffer.GetPointer());
if (!chunk.IsNull()) {
// Append the interleaved data to input buffer.
AudioTrackEncoder::InterleaveTrackData(chunk, samplesToCopy, mChannels,
dst);
} else {
// Silence.
memset(dst, 0, bytesToCopy);
}
samplesCopied += samplesToCopy;
buffer.IncreaseOffset(bytesToCopy);
iter.Next();
}
} else if (aInputFlags & BUFFER_EOS) {
// No audio data left in segment but we still have to feed something to
// MediaCodec in order to notify EOS.
size_t bytesToCopy = mChannels * sizeof(AudioDataValue);
memset(buffer.GetPointer(), 0, bytesToCopy);
buffer.IncreaseOffset(bytesToCopy);
samplesCopied = 1;
}
if (samplesCopied > 0) {
result = buffer.Enqueue(mTimestamp, aInputFlags);
NS_ENSURE_TRUE(result == OK, NS_ERROR_FAILURE);
mTimestamp += samplesCopied * mSampleDuration;
}
return NS_OK;
}
// Generate decoder config descriptor (defined in ISO/IEC 14496-1 8.3.4.1) for
// AAC. The hard-coded bytes are copied from
// MPEG4Writer::Track::writeMp4aEsdsBox() implementation in libstagefright.
status_t
OMXAudioEncoder::AppendDecoderConfig(nsTArray<uint8_t>* aOutputBuf,
ABuffer* aData)
{
MOZ_ASSERT(aData);
const size_t csdSize = aData->size();
// See
// http://wiki.multimedia.cx/index.php?title=Understanding_AAC#Packaging.2FEncapsulation_And_Setup_Data
// AAC decoder specific descriptor contains 2 bytes.
NS_ENSURE_TRUE(csdSize == 2, ERROR_MALFORMED);
// Encoder output must be consistent with kAACFrameDuration:
// 14th bit (frame length flag) == 0 => 1024 (kAACFrameDuration) samples.
NS_ENSURE_TRUE((aData->data()[1] & 0x04) == 0, ERROR_MALFORMED);
// Decoder config descriptor
const uint8_t decConfig[] = {
0x04, // Decoder config descriptor tag.
15 + csdSize, // Size: following bytes + csd size.
0x40, // Object type: MPEG-4 audio.
0x15, // Stream type: audio, reserved: 1.
0x00, 0x03, 0x00, // Buffer size: 768 (kAACFrameSize).
0x00, 0x01, 0x77, 0x00, // Max bitrate: 96000 (kAACBitrate).
0x00, 0x01, 0x77, 0x00, // Avg bitrate: 96000 (kAACBitrate).
0x05, // Decoder specific descriptor tag.
csdSize, // Data size.
};
// SL config descriptor.
const uint8_t slConfig[] = {
0x06, // SL config descriptor tag.
0x01, // Size.
0x02, // Fixed value.
};
aOutputBuf->SetCapacity(sizeof(decConfig) + csdSize + sizeof(slConfig));
aOutputBuf->AppendElements(decConfig, sizeof(decConfig));
aOutputBuf->AppendElements(aData->data(), csdSize);
aOutputBuf->AppendElements(slConfig, sizeof(slConfig));
return OK;
}
nsresult
OMXCodecWrapper::GetNextEncodedFrame(nsTArray<uint8_t>* aOutputBuf,
int64_t* aOutputTimestamp,
int* aOutputFlags, int64_t aTimeOut)
{
MOZ_ASSERT(mStarted,
"Configure() should be called before GetNextEncodedFrame().");
// Dequeue a buffer from output buffers.
size_t index = 0;
size_t outOffset = 0;
size_t outSize = 0;
int64_t outTimeUs = 0;
uint32_t outFlags = 0;
bool retry = false;
do {
status_t result = mCodec->dequeueOutputBuffer(&index, &outOffset, &outSize,
&outTimeUs, &outFlags,
aTimeOut);
switch (result) {
case OK:
break;
case INFO_OUTPUT_BUFFERS_CHANGED:
// Update our references to new buffers.
result = mCodec->getOutputBuffers(&mOutputBufs);
// Get output from a new buffer.
retry = true;
break;
case INFO_FORMAT_CHANGED:
// It's okay: for encoder, MediaCodec reports this only to inform caller
// that there will be a codec config buffer next.
return NS_OK;
case -EAGAIN:
// Output buffer not available. Caller can try again later.
return NS_OK;
default:
CODEC_ERROR("MediaCodec error:%d", result);
MOZ_ASSERT(false, "MediaCodec error.");
return NS_ERROR_FAILURE;
}
} while (retry);
if (aOutputBuf) {
aOutputBuf->Clear();
const sp<ABuffer> omxBuf = mOutputBufs.itemAt(index);
if (outFlags & MediaCodec::BUFFER_FLAG_CODECCONFIG) {
// Codec specific data.
if (AppendDecoderConfig(aOutputBuf, omxBuf.get()) != OK) {
mCodec->releaseOutputBuffer(index);
return NS_ERROR_FAILURE;
}
} else {
AppendFrame(aOutputBuf, omxBuf->data(), omxBuf->size());
}
}
mCodec->releaseOutputBuffer(index);
if (aOutputTimestamp) {
*aOutputTimestamp = outTimeUs;
}
if (aOutputFlags) {
*aOutputFlags = outFlags;
}
return NS_OK;
}
}
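
OMXVideoEncoder::AppendFrame() above rewrites the leading Annex B start code into a length prefix, per ISO/IEC 14496-15 5.2.3. As a standalone illustration of that byte-level rewrite (not the patch's exact routine, which works on the raw MediaCodec buffer and the aSize it was handed), here is a minimal sketch that replaces a 4-byte start code with the 4-byte big-endian length of the remaining NAL payload; the example NAL bytes are hypothetical.

#include <algorithm>
#include <cstdint>
#include <cstdio>
#include <iterator>
#include <vector>

// Replace a leading 4-byte start code (00 00 00 01) with a 4-byte big-endian
// NAL unit length, in place. Returns false if there is no 4-byte start code.
static bool StartCodeToLength(std::vector<uint8_t>& aNal)
{
  static const uint8_t kStartCode[] = { 0x00, 0x00, 0x00, 0x01 };
  if (aNal.size() < sizeof(kStartCode) ||
      !std::equal(std::begin(kStartCode), std::end(kStartCode), aNal.begin())) {
    return false;
  }
  const uint32_t payload = static_cast<uint32_t>(aNal.size() - sizeof(kStartCode));
  aNal[0] = (payload >> 24) & 0xFF;
  aNal[1] = (payload >> 16) & 0xFF;
  aNal[2] = (payload >> 8) & 0xFF;
  aNal[3] = payload & 0xFF;
  return true;
}

int main()
{
  // Hypothetical encoder output: start code + 5-byte NAL payload.
  std::vector<uint8_t> nal = { 0x00, 0x00, 0x00, 0x01, 0x65, 0x88, 0x84, 0x00, 0x20 };
  if (StartCodeToLength(nal)) {
    for (uint8_t b : nal) {
      printf("%02X ", b); // Prints: 00 00 00 05 65 88 84 00 20
    }
    printf("\n");
  }
  return 0;
}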

View File

@ -0,0 +1,266 @@
/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*-*/
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this file,
* You can obtain one at http://mozilla.org/MPL/2.0/. */
#ifndef OMXCodecWrapper_h_
#define OMXCodecWrapper_h_
#include <gui/Surface.h>
#include <stagefright/foundation/ABuffer.h>
#include <stagefright/foundation/AMessage.h>
#include <stagefright/MediaCodec.h>
#include "AudioSegment.h"
#include "GonkNativeWindow.h"
#include "GonkNativeWindowClient.h"
namespace android {
class OMXAudioEncoder;
class OMXVideoEncoder;
/**
* This class (and its subclasses) wraps the video and audio codecs from the
* MediaCodec API in libstagefright. Currently only the AVC/H.264 video encoder
* and the AAC audio encoder are supported.
*
* OMXCodecWrapper has static creator functions that return actual codec
* instances for the supported codec types, and serves as the superclass that
* provides a function to read encoded data from the codec as a byte array. Two
* subclasses, OMXAudioEncoder and OMXVideoEncoder, respectively provide
* functions for encoding data from an audio or video track.
*
* A typical usage is as follows:
* - Call one of the creator functions Create...() to get either an
* OMXAudioEncoder or an OMXVideoEncoder object.
* - Configure codec by providing characteristics of input raw data, such as
* video frame width and height, using Configure().
* - Send raw data (and notify end of stream) with Encode().
* - Get encoded data through GetNextEncodedFrame().
* - Repeat previous 2 steps until end of stream.
* - Destroy the object.
*
* The lifecycle of the underlying OMX codec is bound to the construction and
* destruction of the OMXCodecWrapper and subclass objects. For some types of
* codecs, such as the HW-accelerated AVC/H.264 encoder, there can be only one
* instance system-wide at a time; attempting to create another instance will
* fail.
*/
class OMXCodecWrapper
{
public:
// Codec types.
enum CodecType {
AAC_ENC, // AAC encoder.
AVC_ENC, // AVC/H.264 encoder.
TYPE_COUNT
};
// Input and output flags.
enum {
// For Encode(), it indicates the end of the input stream;
// for GetNextEncodedFrame(), it indicates the end of the output
// stream.
BUFFER_EOS = MediaCodec::BUFFER_FLAG_EOS,
// For GetNextEncodedFrame(). It indicates the output buffer is an I-frame.
BUFFER_SYNC_FRAME = MediaCodec::BUFFER_FLAG_SYNCFRAME,
// For GetNextEncodedFrame(). It indicates that the output buffer contains
// codec specific configuration info. (SPS & PPS for AVC/H.264;
// DecoderSpecificInfo for AAC)
BUFFER_CODEC_CONFIG = MediaCodec::BUFFER_FLAG_CODECCONFIG,
};
// Hard-coded values for AAC DecoderConfigDescriptor in libstagefright.
// See MPEG4Writer::Track::writeMp4aEsdsBox()
// Exposed for the need of MP4 container writer.
enum {
kAACBitrate = 96000, // bps
kAACFrameSize = 768, // bytes
kAACFrameDuration = 1024, // How many samples per AAC frame.
};
/** Create an AAC audio encoder. Returns nullptr on failure. */
static OMXAudioEncoder* CreateAACEncoder();
/** Create an AVC/H.264 video encoder. Returns nullptr on failure. */
static OMXVideoEncoder* CreateAVCEncoder();
virtual ~OMXCodecWrapper();
/**
* Get the next available encoded data from MediaCodec. The data will be
* copied into the aOutputBuf array, with its timestamp (in microseconds) in
* aOutputTimestamp.
* Wait at most aTimeOut microseconds to dequeue an output buffer.
*/
nsresult GetNextEncodedFrame(nsTArray<uint8_t>* aOutputBuf,
int64_t* aOutputTimestamp, int* aOutputFlags,
int64_t aTimeOut);
protected:
/**
* See whether the object has been initialized successfully and is ready to
* use.
*/
virtual bool IsValid() { return mCodec != nullptr; }
/**
* Construct a codec-specific configuration blob from the given data aData
* generated by the media codec and append it to aOutputBuf. Needed by the MP4
* container writer for generating the decoder config box. Returns OK on
* success.
*/
virtual status_t AppendDecoderConfig(nsTArray<uint8_t>* aOutputBuf,
ABuffer* aData) = 0;
/**
* Append encoded frame data generated by the media codec (stored in aData,
* aSize bytes long) to aOutputBuf. Subclasses can override this function
* to process the data for a specific container writer.
*/
virtual void AppendFrame(nsTArray<uint8_t>* aOutputBuf,
const uint8_t* aData, size_t aSize)
{
aOutputBuf->AppendElements(aData, aSize);
}
private:
// Hide these. User should always use creator functions to get a media codec.
OMXCodecWrapper() MOZ_DELETE;
OMXCodecWrapper(const OMXCodecWrapper&) MOZ_DELETE;
OMXCodecWrapper& operator=(const OMXCodecWrapper&) MOZ_DELETE;
/**
* Create a media codec of the given type. It will be an AVC/H.264 video
* encoder if aCodecType is AVC_ENC, or an AAC audio encoder if aCodecType is
* AAC_ENC.
*/
OMXCodecWrapper(CodecType aCodecType);
// For subclasses to access hidden constructor and implementation details.
friend class OMXAudioEncoder;
friend class OMXVideoEncoder;
/**
* Start the media codec.
*/
status_t Start();
/**
* Stop the media codec.
*/
status_t Stop();
// The actual codec instance provided by libstagefright.
sp<MediaCodec> mCodec;
// A dedicated message loop with its own thread, used by MediaCodec.
sp<ALooper> mLooper;
Vector<sp<ABuffer> > mInputBufs; // MediaCodec buffers to hold input data.
Vector<sp<ABuffer> > mOutputBufs; // MediaCodec buffers to hold output data.
bool mStarted; // Has MediaCodec been started?
};
/**
* Audio encoder.
*/
class OMXAudioEncoder MOZ_FINAL : public OMXCodecWrapper
{
public:
/**
* Configure audio codec parameters and start media codec. It must be called
* before calling Encode() and GetNextEncodedFrame().
*/
nsresult Configure(int aChannelCount, int aSampleRate);
/**
* Encode 16-bit PCM audio samples stored in aSegment. To notify end of
* stream, set aInputFlags to BUFFER_EOS.
*/
nsresult Encode(const mozilla::AudioSegment& aSegment, int aInputFlags = 0);
protected:
virtual status_t AppendDecoderConfig(nsTArray<uint8_t>* aOutputBuf,
ABuffer* aData) MOZ_OVERRIDE;
private:
// Hide these. User should always use creator functions to get a media codec.
OMXAudioEncoder() MOZ_DELETE;
OMXAudioEncoder(const OMXAudioEncoder&) MOZ_DELETE;
OMXAudioEncoder& operator=(const OMXAudioEncoder&) MOZ_DELETE;
/**
* Create an audio codec. It will be an AAC encoder if aCodecType is
* AAC_ENC.
*/
OMXAudioEncoder(CodecType aCodecType)
: OMXCodecWrapper(aCodecType)
, mChannels(0)
, mTimestamp(0)
, mSampleDuration(0) {}
// For creator function to access hidden constructor.
friend class OMXCodecWrapper;
// Number of audio channels.
size_t mChannels;
// The total duration, in microseconds, of the audio samples encoded so far.
int64_t mTimestamp;
// Time per audio sample in microseconds.
int64_t mSampleDuration;
};
/**
* Video encoder.
*/
class OMXVideoEncoder MOZ_FINAL : public OMXCodecWrapper
{
public:
/**
* Configure video codec parameters and start media codec. It must be called
* before calling Encode() and GetNextEncodedFrame().
*/
nsresult Configure(int aWidth, int aHeight, int aFrameRate);
/**
* Encode a video frame, aWidth pixels wide and aHeight pixels tall, in
* semi-planar YUV 4:2:0 format, stored in the buffer of aImage. aTimestamp
* gives the frame timestamp/presentation time (in microseconds). To notify
* end of stream, set aInputFlags to BUFFER_EOS.
*/
nsresult Encode(const mozilla::layers::Image* aImage, int aWidth, int aHeight,
int64_t aTimestamp, int aInputFlags = 0);
protected:
virtual status_t AppendDecoderConfig(nsTArray<uint8_t>* aOutputBuf,
ABuffer* aData) MOZ_OVERRIDE;
// AVC/H.264 encoder replaces NAL unit start code with the unit length as
// specified in ISO/IEC 14496-15 5.2.3.
virtual void AppendFrame(nsTArray<uint8_t>* aOutputBuf,
const uint8_t* aData, size_t aSize) MOZ_OVERRIDE;
private:
// Hide these. User should always use creator functions to get a media codec.
OMXVideoEncoder() MOZ_DELETE;
OMXVideoEncoder(const OMXVideoEncoder&) MOZ_DELETE;
OMXVideoEncoder& operator=(const OMXVideoEncoder&) MOZ_DELETE;
/**
* Create a video codec. It will be an AVC/H.264 encoder if aCodecType is
* AVC_ENC.
*/
OMXVideoEncoder(CodecType aCodecType)
: OMXCodecWrapper(aCodecType), mWidth(0), mHeight(0) {}
// For creator function to access hidden constructor.
friend class OMXCodecWrapper;
int mWidth;
int mHeight;
};
} // namespace android
#endif // OMXCodecWrapper_h_
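
The "typical usage" steps in the OMXCodecWrapper.h comment above (create, Configure(), Encode() with BUFFER_EOS on the last input, then drain GetNextEncodedFrame() until EOS) can be spelled out as a driving loop. The sketch below does this against a hypothetical stand-in class with the same method names, so it stays self-contained; the real android::OMXVideoEncoder requires the Gonk/stagefright tree, and the stand-in's behaviour is purely illustrative.

#include <cstdint>
#include <cstdio>
#include <vector>

// Hypothetical stand-in mirroring the call sequence of OMXVideoEncoder.
struct FakeVideoEncoder {
  enum { BUFFER_EOS = 1 << 0 };

  bool Configure(int aWidth, int aHeight, int aFrameRate)
  {
    return aWidth > 0 && aHeight > 0 && aFrameRate > 0;
  }

  // Pretend to encode: just remember whether EOS was signalled.
  void Encode(const void* /* aImage */, int64_t /* aTimestampUs */, int aFlags)
  {
    mSawEos = mSawEos || (aFlags & BUFFER_EOS);
  }

  // Pretend to drain: return one dummy frame, then stop.
  bool GetNextEncodedFrame(std::vector<uint8_t>* aOut, int* aOutFlags)
  {
    if (mDrained) {
      return false;
    }
    aOut->assign(4, 0xAB);
    *aOutFlags = mSawEos ? BUFFER_EOS : 0;
    mDrained = true;
    return true;
  }

  bool mSawEos = false;
  bool mDrained = false;
};

int main()
{
  FakeVideoEncoder enc;
  if (!enc.Configure(640, 480, 30)) {
    return 1;
  }

  // Send raw data; flag the last frame as end of stream.
  for (int i = 0; i < 3; ++i) {
    enc.Encode(nullptr, i * 33333, i == 2 ? FakeVideoEncoder::BUFFER_EOS : 0);
  }

  // Get encoded data until the EOS flag comes back.
  std::vector<uint8_t> out;
  int flags = 0;
  while (enc.GetNextEncodedFrame(&out, &flags)) {
    printf("got %zu encoded bytes%s\n", out.size(),
           (flags & FakeVideoEncoder::BUFFER_EOS) ? " (EOS)" : "");
    if (flags & FakeVideoEncoder::BUFFER_EOS) {
      break;
    }
  }
  return 0;
}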

View File

@ -16,6 +16,15 @@ SOURCES += [
'OmxDecoder.cpp',
]
if CONFIG['MOZ_OMX_ENCODER']:
EXPORTS += [
'OMXCodecWrapper.h',
]
SOURCES += [
'OMXCodecDescriptorUtil.cpp',
'OMXCodecWrapper.cpp',
]
if 'rtsp' in CONFIG['NECKO_PROTOCOLS']:
EXPORTS += [
'RtspOmxDecoder.h',

View File

@ -1824,6 +1824,14 @@ Navigator::HasNfcPeerSupport(JSContext* /* unused */, JSObject* aGlobal)
nsCOMPtr<nsPIDOMWindow> win = GetWindowFromGlobal(aGlobal);
return win && CheckPermission(win, "nfc-write");
}
/* static */
bool
Navigator::HasNfcManagerSupport(JSContext* /* unused */, JSObject* aGlobal)
{
nsCOMPtr<nsPIDOMWindow> win = GetWindowFromGlobal(aGlobal);
return win && CheckPermission(win, "nfc-manager");
}
#endif // MOZ_NFC
#ifdef MOZ_TIME_MANAGER

View File

@ -272,6 +272,7 @@ public:
#ifdef MOZ_NFC
static bool HasNfcSupport(JSContext* /* unused */, JSObject* aGlobal);
static bool HasNfcPeerSupport(JSContext* /* unused */, JSObject* aGlobal);
static bool HasNfcManagerSupport(JSContext* /* unused */, JSObject* aGlobal);
#endif // MOZ_NFC
#ifdef MOZ_TIME_MANAGER
static bool HasTimeSupport(JSContext* /* unused */, JSObject* aGlobal);

View File

@ -34,6 +34,8 @@ const PROPERTIES = [
"url", "impp", "tel"
];
let mozContactInitWarned = false;
function Contact() { }
Contact.prototype = {
@ -43,6 +45,19 @@ Contact.prototype = {
}
},
init: function(aProp) {
// init is deprecated, warn once in the console if it's used
if (!mozContactInitWarned) {
mozContactInitWarned = true;
Cu.reportError("mozContact.init is DEPRECATED. Use the mozContact constructor instead. " +
"See https://developer.mozilla.org/docs/WebAPI/Contacts for details.");
}
for (let prop of PROPERTIES) {
this[prop] = aProp[prop];
}
},
setMetadata: function(aId, aPublished, aUpdated) {
this.id = aId;
if (aPublished) {

View File

@ -768,6 +768,38 @@ var steps = [
req.onerror = onFailure;
},
clearDatabase,
function() {
ok(true, "mozContact.init deprecation message");
var c = new mozContact();
SimpleTest.monitorConsole(next, [
{ errorMessage: "mozContact.init is DEPRECATED. Use the mozContact constructor instead. " +
"See https://developer.mozilla.org/docs/WebAPI/Contacts for details." }
], /* forbidUnexpectedMsgs */ true);
c.init({name: ["Bar"]});
c.init({name: ["Bar"]});
SimpleTest.endMonitorConsole();
},
function() {
ok(true, "mozContact.init works as expected");
var c = new mozContact({name: ["Foo"]});
c.init({name: ["Bar"]});
ise(c.name[0], "Bar", "Same name");
next();
},
function() {
ok(true, "mozContact.init without parameters");
var c = new mozContact({name: ["Foo"]});
c.init();
next();
},
function() {
ok(true, "mozContact.init resets properties");
var c = new mozContact({jobTitle: ["Software Engineer"]});
c.init({nickname: ["Jobless Johnny"]});
ise(c.nickname[0], "Jobless Johnny", "Same nickname");
ok(!c.jobTitle, "jobTitle is not set");
next();
},
function () {
ok(true, "all done!\n");
SimpleTest.finish();

View File

@ -18,7 +18,9 @@ XPCOMUtils.defineLazyServiceGetter(this, "cpmm",
"nsIMessageSender");
function debug(aStr) {
#ifdef MOZ_DEBUG
dump("-*- DownloadsAPI.js : " + aStr + "\n");
#endif
}
function DOMDownloadManagerImpl() {

View File

@ -19,7 +19,9 @@ XPCOMUtils.defineLazyServiceGetter(this, "ppmm",
"nsIMessageBroadcaster");
function debug(aStr) {
#ifdef MOZ_DEBUG
dump("-*- DownloadsAPI.jsm : " + aStr + "\n");
#endif
}
function sendPromiseMessage(aMm, aMessageName, aData, aError) {

View File

@ -27,7 +27,9 @@ XPCOMUtils.defineLazyServiceGetter(this, "cpmm",
*/
function debug(aStr) {
#ifdef MOZ_DEBUG
dump("-*- DownloadsIPC.jsm : " + aStr + "\n");
#endif
}
const ipcMessages = ["Downloads:Added",

View File

@ -5,11 +5,14 @@
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
EXTRA_COMPONENTS += [
'DownloadsAPI.js',
'DownloadsAPI.manifest',
]
EXTRA_JS_MODULES += [
EXTRA_PP_COMPONENTS = [
'DownloadsAPI.js',
]
EXTRA_PP_JS_MODULES += [
'DownloadsAPI.jsm',
'DownloadsIPC.jsm',
]

View File

@ -243,6 +243,17 @@ XPCOMUtils.defineLazyGetter(this, "gMessageManager", function () {
}
},
removePeerTarget: function removePeerTarget(target) {
let targets = this.peerTargetsMap;
Object.keys(targets).forEach((appId) => {
let targetInfo = targets[appId];
if (targetInfo && targetInfo.target === target) {
// Remove the target from the list of registered targets
delete targets[appId];
}
});
},
isRegisteredP2PTarget: function isRegisteredP2PTarget(appId, event) {
let targetInfo = this.peerTargetsMap[appId];
// Check if it is a registered target for the 'event'
@ -281,6 +292,7 @@ XPCOMUtils.defineLazyGetter(this, "gMessageManager", function () {
// already forgotten its permissions so we need to unregister the target
// for every permission.
this._unregisterMessageTarget(null, msg.target);
this.removePeerTarget(msg.target);
return null;
}

View File

@ -91,6 +91,8 @@ interface mozContact {
[Cached, Pure] attribute sequence<DOMString>? note;
[Cached, Pure] attribute sequence<DOMString>? key;
void init(optional ContactProperties properties);
[ChromeOnly]
void setMetadata(DOMString id, Date? published, Date? updated);

View File

@ -4,13 +4,9 @@
/* Copyright © 2013 Deutsche Telekom, Inc. */
[JSImplementation="@mozilla.org/navigatorNfc;1",
NavigatorProperty="mozNfc",
Func="Navigator::HasNfcSupport"]
interface MozNfc : EventTarget {
MozNFCTag getNFCTag(DOMString sessionId);
MozNFCPeer getNFCPeer(DOMString sessionId);
[NoInterfaceObject,
Func="Navigator::HasNfcManagerSupport"]
interface MozNfcManager {
/**
* API to check if the given application's manifest
* URL is registered with the Chrome Process or not.
@ -22,13 +18,27 @@ interface MozNfc : EventTarget {
* and 'nfc-write'
*/
DOMRequest checkP2PRegistration(DOMString manifestUrl);
};
[JSImplementation="@mozilla.org/navigatorNfc;1",
NavigatorProperty="mozNfc",
Func="Navigator::HasNfcSupport"]
interface MozNfc : EventTarget {
MozNFCTag getNFCTag(DOMString sessionId);
MozNFCPeer getNFCPeer(DOMString sessionId);
[Func="Navigator::HasNfcPeerSupport"]
attribute EventHandler onpeerready;
[Func="Navigator::HasNfcPeerSupport"]
attribute EventHandler onpeerlost;
};
// Mozilla Only
partial interface MozNfc {
[ChromeOnly]
void eventListenerWasAdded(DOMString aType);
[ChromeOnly]
void eventListenerWasRemoved(DOMString aType);
};
MozNfc implements MozNfcManager;

View File

@ -304,6 +304,11 @@ public:
gfx::Float aOpacity, const gfx::Matrix4x4 &aTransform)
{ /* Should turn into pure virtual once implemented in D3D */ }
/*
* Clear aRect on FrameBuffer.
*/
virtual void clearFBRect(const gfx::Rect* aRect) { }
/**
* Start a new frame.
*

View File

@ -318,6 +318,12 @@ ContainerRender(ContainerT* aContainer,
// Composer2D will compose this layer so skip GPU composition
// this time & reset composition flag for next composition phase
layerToRender->SetLayerComposited(false);
if (layerToRender->GetClearFB()) {
// Clear layer's visible rect on FrameBuffer with transparent pixels
gfx::Rect aRect(clipRect.x, clipRect.y, clipRect.width, clipRect.height);
compositor->clearFBRect(&aRect);
layerToRender->SetClearFB(false);
}
} else {
layerToRender->RenderLayer(clipRect);
}
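
The hunk above, together with the HwcComposer2D and CompositorOGL changes later in this commit, adds a small flag protocol: when the HWC composes a layer as an overlay and hints that its framebuffer region must be cleared, the layer is marked with SetClearFB(true), and ContainerRender() then asks the compositor to clear that rect with transparent pixels instead of re-rendering the layer on the GPU. The sketch below walks through that hand-off using hypothetical stand-in types (the real flow uses LayerComposite and Compositor::clearFBRect()); it is illustrative only.

#include <cstdio>

struct Rect { float x, y, width, height; };

// Hypothetical stand-in for the flags added to LayerComposite in this patch.
struct FakeLayerComposite {
  bool mLayerComposited = false;
  bool mClearFB = false;
  Rect mVisibleRect{ 0, 0, 0, 0 };

  void SetLayerComposited(bool aValue) { mLayerComposited = aValue; }
  void SetClearFB(bool aValue) { mClearFB = aValue; }
  bool HasLayerBeenComposited() const { return mLayerComposited; }
  bool GetClearFB() const { return mClearFB; }
};

// Hypothetical stand-in for the compositor's new clearFBRect() hook.
struct FakeCompositor {
  void clearFBRect(const Rect* aRect)
  {
    if (aRect) {
      printf("clear %gx%g at (%g,%g) to transparent\n",
             aRect->width, aRect->height, aRect->x, aRect->y);
    }
  }
};

int main()
{
  FakeLayerComposite layer;
  layer.mVisibleRect = { 0, 0, 1280, 720 };

  // HwcComposer2D::TryHwComposition(): the overlay composed this layer and the
  // HWC hinted that its framebuffer region must be cleared.
  layer.SetLayerComposited(true);
  layer.SetClearFB(true);

  // ContainerRender(): skip GPU composition, clear the rect, reset the flags.
  FakeCompositor compositor;
  if (layer.HasLayerBeenComposited()) {
    layer.SetLayerComposited(false);
    if (layer.GetClearFB()) {
      compositor.clearFBRect(&layer.mVisibleRect);
      layer.SetClearFB(false);
    }
  }
  return 0;
}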

View File

@ -764,6 +764,7 @@ LayerComposite::LayerComposite(LayerManagerComposite *aManager)
, mShadowTransformSetByAnimation(false)
, mDestroyed(false)
, mLayerComposited(false)
, mClearFB(false)
{ }
LayerComposite::~LayerComposite()

View File

@ -358,6 +358,11 @@ public:
mLayerComposited = value;
}
void SetClearFB(bool value)
{
mClearFB = value;
}
// These getters can be used anytime.
float GetShadowOpacity() { return mShadowOpacity; }
const nsIntRect* GetShadowClipRect() { return mUseShadowClipRect ? &mShadowClipRect : nullptr; }
@ -365,6 +370,7 @@ public:
const gfx3DMatrix& GetShadowTransform() { return mShadowTransform; }
bool GetShadowTransformSetByAnimation() { return mShadowTransformSetByAnimation; }
bool HasLayerBeenComposited() { return mLayerComposited; }
bool GetClearFB() { return mClearFB; }
protected:
gfx3DMatrix mShadowTransform;
@ -377,6 +383,7 @@ protected:
bool mShadowTransformSetByAnimation;
bool mDestroyed;
bool mLayerComposited;
bool mClearFB;
};

View File

@ -767,6 +767,18 @@ CalculatePOTSize(const IntSize& aSize, GLContext* gl)
return IntSize(NextPowerOfTwo(aSize.width), NextPowerOfTwo(aSize.height));
}
void
CompositorOGL::clearFBRect(const gfx::Rect* aRect)
{
if (!aRect) {
return;
}
ScopedScissorRect autoScissorRect(mGLContext, aRect->x, aRect->y, aRect->width, aRect->height);
mGLContext->fClearColor(0.0, 0.0, 0.0, 0.0);
mGLContext->fClear(LOCAL_GL_COLOR_BUFFER_BIT | LOCAL_GL_DEPTH_BUFFER_BIT);
}
void
CompositorOGL::BeginFrame(const nsIntRegion& aInvalidRegion,
const Rect *aClipRectIn,

View File

@ -252,6 +252,11 @@ private:
*/
bool mFrameInProgress;
/*
* Clear aRect on FrameBuffer.
*/
virtual void clearFBRect(const gfx::Rect* aRect);
/* Start a new frame. If aClipRectIn is null and aClipRectOut is non-null,
* sets *aClipRectOut to the screen dimensions.
*/

View File

@ -41,16 +41,19 @@ def MergeProfiles(files):
threads.append(thread)
# Note that pid + sym, pid + location could be ambiguous
# if we had pid=11 sym=1 && pid=1 sym=11. To avoid this we format
# pidStr with leading zeros.
pidStr = "%05d" % (int(pid))
# if we had pid=11 sym=1 && pid=1 sym=11.
pidStr = pid + ":"
thread['startTime'] = fileData['profileJSON']['meta']['startTime']
samples = thread['samples']
for sample in thread['samples']:
for frame in sample['frames']:
if "location" in frame and frame['location'][0:2] == '0x':
frame['location'] = pidStr + frame['location']
oldLoc = frame['location']
newLoc = pidStr + oldLoc
frame['location'] = newLoc
# Default to the unprefixed symbol if no translation is available
symTable[newLoc] = oldLoc
filesyms = fileData['symbolicationTable']
for sym in filesyms.keys():

View File

@ -507,6 +507,13 @@ HwcComposer2D::TryHwComposition()
// Overlay Composition, set layer composition flag
// on mapped LayerComposite to skip GPU composition
mHwcLayerMap[k]->SetLayerComposited(true);
if (k && (mList->hwLayers[k].hints & HWC_HINT_CLEAR_FB) &&
(mList->hwLayers[k].blending == HWC_BLENDING_NONE)) {
// Clear visible rect on FB with transparent pixels.
// Never clear the 1st layer since we're guaranteed
// that FB is already cleared.
mHwcLayerMap[k]->SetClearFB(true);
}
break;
default:
break;