merge autoland to mozilla-central. r=merge a=merge

MozReview-Commit-ID: 2MWgC2ySf0i
Sebastian Hengst 2017-05-29 00:49:24 +02:00
commit ab557674e4
16 changed files with 2679 additions and 68 deletions

View File

@@ -36,11 +36,6 @@ fi
ldflags="$ldflags -Wl,-no_data_in_code_info"
export LDFLAGS="$ldflags"
# Until bug 1342503 is fixed, we can't build some of the webrender dependencies
# on buildbot OS X builders, because rustc will use some random system toolchain
# instead of the one we package with tooltool.
ac_add_options --disable-webrender
# If not set use the system default clang
if [ -z "$CC" ]; then
export CC=clang

View File

@@ -342,6 +342,7 @@ private:
void setAnchorFocusRange(int32_t aIndex); // pass in index into mRanges;
// negative value clears
// mAnchorFocusRange
void SelectFramesForContent(nsIContent* aContent, bool aSelected);
nsresult SelectAllFramesForContent(nsIContentIterator *aInnerIter,
nsIContent *aContent,
bool aSelected);

View File

@@ -4464,46 +4464,53 @@ Selection::GetPrimaryFrameForFocusNode(nsIFrame** aReturnFrame,
return NS_OK;
}
void
Selection::SelectFramesForContent(nsIContent* aContent,
bool aSelected)
{
nsIFrame* frame = aContent->GetPrimaryFrame();
if (!frame) {
return;
}
// The frame could be an SVG text frame, in which case we don't treat it
// as a text frame.
if (frame->IsTextFrame()) {
nsTextFrame* textFrame = static_cast<nsTextFrame*>(frame);
textFrame->SetSelectedRange(0, aContent->GetText()->GetLength(),
aSelected, mSelectionType);
} else {
frame->InvalidateFrameSubtree(); // frame continuations?
}
}
// Select all content children of aContent
nsresult
Selection::SelectAllFramesForContent(nsIContentIterator* aInnerIter,
nsIContent* aContent,
bool aSelected)
{
nsresult result = aInnerIter->Init(aContent);
nsIFrame *frame;
if (NS_SUCCEEDED(result))
{
// First select frame of content passed in
frame = aContent->GetPrimaryFrame();
if (frame && frame->IsTextFrame()) {
nsTextFrame* textFrame = static_cast<nsTextFrame*>(frame);
textFrame->SetSelectedRange(0, aContent->GetText()->GetLength(),
aSelected, mSelectionType);
}
// Now iterate through the child frames and set them
while (!aInnerIter->IsDone()) {
nsCOMPtr<nsIContent> innercontent =
do_QueryInterface(aInnerIter->GetCurrentNode());
frame = innercontent->GetPrimaryFrame();
if (frame) {
if (frame->IsTextFrame()) {
nsTextFrame* textFrame = static_cast<nsTextFrame*>(frame);
textFrame->SetSelectedRange(0, innercontent->GetText()->GetLength(),
aSelected, mSelectionType);
} else {
frame->InvalidateFrameSubtree(); // frame continuations?
}
}
aInnerIter->Next();
}
// If aContent doesn't have children, we should avoid using the content
// iterator for performance reasons.
if (!aContent->HasChildren()) {
SelectFramesForContent(aContent, aSelected);
return NS_OK;
}
return NS_ERROR_FAILURE;
if (NS_WARN_IF(NS_FAILED(aInnerIter->Init(aContent)))) {
return NS_ERROR_FAILURE;
}
for (; !aInnerIter->IsDone(); aInnerIter->Next()) {
nsINode* node = aInnerIter->GetCurrentNode();
// Detect bugs in the content iterator, but don't cause a crash in
// release builds.
MOZ_ASSERT(node);
nsIContent* innercontent =
node && node->IsContent() ? node->AsContent() : nullptr;
SelectFramesForContent(innercontent, aSelected);
}
return NS_OK;
}
/**
@@ -4530,51 +4537,77 @@ Selection::selectFrames(nsPresContext* aPresContext, nsRange* aRange,
return NS_OK;
}
nsCOMPtr<nsIContentIterator> iter = NS_NewContentSubtreeIterator();
iter->Init(aRange);
// Loop through the content iterator for each content node; for each text
// node, call SetSelected on it:
nsCOMPtr<nsIContent> content = do_QueryInterface(aRange->GetStartParent());
if (!content) {
nsINode* startNode = aRange->GetStartParent();
nsIContent* startContent =
startNode && startNode->IsContent() ? startNode->AsContent() : nullptr;
if (!startContent) {
// Don't warn, bug 1055722
return NS_ERROR_UNEXPECTED;
}
// We must call the first one explicitly
if (content->IsNodeOfType(nsINode::eTEXT)) {
nsIFrame* frame = content->GetPrimaryFrame();
// The frame could be an SVG text frame, in which case we'll ignore it.
if (frame && frame->IsTextFrame()) {
nsTextFrame* textFrame = static_cast<nsTextFrame*>(frame);
uint32_t startOffset = aRange->StartOffset();
uint32_t endOffset;
if (aRange->GetEndParent() == content) {
endOffset = aRange->EndOffset();
bool isFirstContentTextNode = startContent->IsNodeOfType(nsINode::eTEXT);
nsINode* endNode = aRange->GetEndParent();
if (isFirstContentTextNode) {
nsIFrame* frame = startContent->GetPrimaryFrame();
// The frame could be an SVG text frame, in which case we don't treat it
// as a text frame.
if (frame) {
if (frame->IsTextFrame()) {
nsTextFrame* textFrame = static_cast<nsTextFrame*>(frame);
uint32_t startOffset = aRange->StartOffset();
uint32_t endOffset;
if (endNode == startContent) {
endOffset = aRange->EndOffset();
} else {
endOffset = startContent->Length();
}
textFrame->SetSelectedRange(startOffset, endOffset, aSelect,
mSelectionType);
} else {
endOffset = content->Length();
frame->InvalidateFrameSubtree();
}
textFrame->SetSelectedRange(startOffset, endOffset, aSelect,
mSelectionType);
}
}
iter->First();
// If the range is in a node and the node is a leaf node, we don't need to
// walk the subtree.
if (aRange->Collapsed() ||
(startNode == endNode && !startNode->HasChildren())) {
if (!isFirstContentTextNode) {
SelectFramesForContent(startContent, aSelect);
}
return NS_OK;
}
nsCOMPtr<nsIContentIterator> iter = NS_NewContentSubtreeIterator();
iter->Init(aRange);
if (isFirstContentTextNode && !iter->IsDone()) {
iter->Next(); // first content has already been handled.
}
nsCOMPtr<nsIContentIterator> inneriter = NS_NewContentIterator();
for (iter->First(); !iter->IsDone(); iter->Next()) {
content = do_QueryInterface(iter->GetCurrentNode());
for (; !iter->IsDone(); iter->Next()) {
nsINode* node = iter->GetCurrentNode();
// Detect bugs in the content iterator, but don't cause a crash in
// release builds.
MOZ_ASSERT(node);
nsIContent* content =
node && node->IsContent() ? node->AsContent() : nullptr;
SelectAllFramesForContent(inneriter, content, aSelect);
}
// We must now do the last one if it is not the same as the first
if (aRange->GetEndParent() != aRange->GetStartParent()) {
nsresult res;
content = do_QueryInterface(aRange->GetEndParent(), &res);
NS_ENSURE_SUCCESS(res, res);
NS_ENSURE_TRUE(content, res);
if (content->IsNodeOfType(nsINode::eTEXT)) {
nsIFrame* frame = content->GetPrimaryFrame();
if (endNode != startNode) {
nsIContent* endContent =
endNode && endNode->IsContent() ? endNode->AsContent() : nullptr;
if (NS_WARN_IF(!endContent)) {
return NS_ERROR_UNEXPECTED;
}
if (endContent->IsNodeOfType(nsINode::eTEXT)) {
nsIFrame* frame = endContent->GetPrimaryFrame();
// The frame could be an SVG text frame, in which case we'll ignore it.
if (frame && frame->IsTextFrame()) {
nsTextFrame* textFrame = static_cast<nsTextFrame*>(frame);

View File

@@ -461,6 +461,19 @@ gvjar.sources += [geckoview_thirdparty_source_dir + f for f in [
'java/com/googlecode/eyesfree/braille/selfbraille/WriteData.java',
]]
if CONFIG['MOZ_ANDROID_HLS_SUPPORT']:
gvjar.sources += [geckoview_source_dir + 'java/org/mozilla/gecko/' + x for x in [
'media/GeckoAudioInfo.java',
'media/GeckoHlsAudioRenderer.java',
'media/GeckoHlsPlayer.java',
'media/GeckoHlsRendererBase.java',
'media/GeckoHlsSample.java',
'media/GeckoHlsVideoRenderer.java',
'media/GeckoVideoInfo.java',
'media/Utils.java',
]]
gvjar.extra_jars += [
CONFIG['ANDROID_SUPPORT_ANNOTATIONS_JAR_LIB'],
CONFIG['ANDROID_SUPPORT_V4_AAR_LIB'],

View File

@@ -0,0 +1,30 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
package org.mozilla.gecko.media;
import java.nio.ByteBuffer;
import org.mozilla.gecko.annotation.WrapForJNI;
// A subset of the class AudioInfo in dom/media/MediaInfo.h
@WrapForJNI
public final class GeckoAudioInfo {
final public byte[] codecSpecificData;
final public int rate;
final public int channels;
final public int bitDepth;
final public int profile;
final public long duration;
final public String mimeType;
public GeckoAudioInfo(int rate, int channels, int bitDepth, int profile,
long duration, String mimeType, byte[] codecSpecificData) {
this.rate = rate;
this.channels = channels;
this.bitDepth = bitDepth;
this.profile = profile;
this.duration = duration;
this.mimeType = mimeType;
this.codecSpecificData = codecSpecificData;
}
}
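For reference, a minimal sketch of how this plain data holder might be populated; all values below are hypothetical, and the duration unit (microseconds) is an assumption:

import org.mozilla.gecko.media.GeckoAudioInfo;

final class GeckoAudioInfoSketch {
    static GeckoAudioInfo sampleAacInfo() {
        byte[] csd = new byte[] { 0x11, (byte) 0x90 };   // hypothetical codec-specific data
        return new GeckoAudioInfo(44100, 2, 16, 2,       // rate, channels, bitDepth, profile
                                  10_000_000L,           // duration (assumed to be microseconds)
                                  "audio/mp4a-latm",     // MIME type
                                  csd);
    }
}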

View File

@@ -0,0 +1,162 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
package org.mozilla.gecko.media;
import android.media.MediaCodec;
import android.media.MediaCodec.BufferInfo;
import android.media.MediaCodec.CryptoInfo;
import android.os.Handler;
import android.util.Log;
import com.google.android.exoplayer2.C;
import com.google.android.exoplayer2.Format;
import com.google.android.exoplayer2.RendererCapabilities;
import com.google.android.exoplayer2.decoder.DecoderInputBuffer;
import com.google.android.exoplayer2.mediacodec.MediaCodecInfo;
import com.google.android.exoplayer2.mediacodec.MediaCodecSelector;
import com.google.android.exoplayer2.mediacodec.MediaCodecUtil;
import com.google.android.exoplayer2.util.MimeTypes;
import java.nio.ByteBuffer;
import org.mozilla.gecko.AppConstants.Versions;
public class GeckoHlsAudioRenderer extends GeckoHlsRendererBase {
public GeckoHlsAudioRenderer(GeckoHlsPlayer.ComponentEventDispatcher eventDispatcher) {
super(C.TRACK_TYPE_AUDIO, eventDispatcher);
assertTrue(Versions.feature16Plus);
LOGTAG = getClass().getSimpleName();
DEBUG = false;
}
@Override
public final int supportsFormat(Format format) {
/*
* FORMAT_EXCEEDS_CAPABILITIES : The Renderer is capable of rendering
* formats with the same mime type, but
* the properties of the format exceed
* the renderer's capability.
* FORMAT_UNSUPPORTED_SUBTYPE : The Renderer is a general purpose
* renderer for formats of the same
* top-level type, but is not capable of
* rendering the format or any other format
* with the same mime type because the
* sub-type is not supported.
* FORMAT_UNSUPPORTED_TYPE : The Renderer is not capable of rendering
* the format, either because it does not support
* the format's top-level type, or because it's
* a specialized renderer for a different mime type.
* ADAPTIVE_NOT_SEAMLESS : The Renderer can adapt between formats,
* but may suffer a brief discontinuity (~50-100ms)
* when adaptation occurs.
*/
String mimeType = format.sampleMimeType;
if (!MimeTypes.isAudio(mimeType)) {
return RendererCapabilities.FORMAT_UNSUPPORTED_TYPE;
}
MediaCodecInfo decoderInfo = null;
try {
MediaCodecSelector mediaCodecSelector = MediaCodecSelector.DEFAULT;
decoderInfo = mediaCodecSelector.getDecoderInfo(mimeType, false);
} catch (MediaCodecUtil.DecoderQueryException e) {
Log.e(LOGTAG, e.getMessage());
}
if (decoderInfo == null) {
return RendererCapabilities.FORMAT_UNSUPPORTED_SUBTYPE;
}
/*
* Note : If execution reaches this point, ExoPlayer assumes support for
* unknown sampleRate and channelCount when the SDK version is less
* than 21; otherwise, a further check is needed whenever the format
* does specify a sampleRate/channelCount.
*/
boolean decoderCapable = Versions.preLollipop ||
((format.sampleRate == Format.NO_VALUE ||
decoderInfo.isAudioSampleRateSupportedV21(format.sampleRate)) &&
(format.channelCount == Format.NO_VALUE ||
decoderInfo.isAudioChannelCountSupportedV21(format.channelCount)));
int formatSupport = decoderCapable ?
RendererCapabilities.FORMAT_HANDLED :
RendererCapabilities.FORMAT_EXCEEDS_CAPABILITIES;
return RendererCapabilities.ADAPTIVE_NOT_SEAMLESS | formatSupport;
}
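// For example: an AAC stream ("audio/mp4a-latm") for which the platform has a
// capable decoder yields ADAPTIVE_NOT_SEAMLESS | FORMAT_HANDLED above, whereas
// a missing decoder yields FORMAT_UNSUPPORTED_SUBTYPE.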
@Override
protected final void createInputBuffer() {
// We're not able to estimate the input buffer size for audio from the format,
// so we rely on the dynamic allocation mechanism provided by DecoderInputBuffer.
mInputBuffer = null;
}
@Override
protected void resetRenderer() {
mInputBuffer = null;
mInitialized = false;
}
@Override
protected void handleReconfiguration(DecoderInputBuffer bufferForRead) {
// Do nothing
}
@Override
protected void handleFormatRead(DecoderInputBuffer bufferForRead) {
onInputFormatChanged(mFormatHolder.format);
}
@Override
protected void handleEndOfStream(DecoderInputBuffer bufferForRead) {
mInputStreamEnded = true;
mDemuxedInputSamples.offer(GeckoHlsSample.EOS);
}
@Override
protected void handleSamplePreparation(DecoderInputBuffer bufferForRead) {
int size = bufferForRead.data.limit();
byte[] realData = new byte[size];
bufferForRead.data.get(realData, 0, size);
ByteBuffer buffer = ByteBuffer.wrap(realData);
mInputBuffer = bufferForRead.data;
mInputBuffer.clear();
CryptoInfo cryptoInfo = bufferForRead.isEncrypted() ? bufferForRead.cryptoInfo.getFrameworkCryptoInfoV16() : null;
BufferInfo bufferInfo = new BufferInfo();
// Flags in DecoderInputBuffer are synced with MediaCodec Buffer flags.
int flags = 0;
flags |= bufferForRead.isKeyFrame() ? MediaCodec.BUFFER_FLAG_KEY_FRAME : 0;
flags |= bufferForRead.isEndOfStream() ? MediaCodec.BUFFER_FLAG_END_OF_STREAM : 0;
bufferInfo.set(0, size, bufferForRead.timeUs, flags);
assertTrue(mFormats.size() > 0);
// We add a new format to the list whenever the format changes, so the
// formatIndex should point to the last (latest) format.
GeckoHlsSample sample = GeckoHlsSample.create(buffer,
bufferInfo,
cryptoInfo,
mFormats.size() - 1);
mDemuxedInputSamples.offer(sample);
if (DEBUG) {
Log.d(LOGTAG, "Demuxed sample PTS : " +
sample.info.presentationTimeUs + ", duration :" +
sample.duration + ", formatIndex(" +
sample.formatIndex + "), queue size : " +
mDemuxedInputSamples.size());
}
}
@Override
protected boolean clearInputSamplesQueue() {
if (DEBUG) { Log.d(LOGTAG, "clearInputSamplesQueue"); }
mDemuxedInputSamples.clear();
return true;
}
@Override
protected void notifyPlayerInputFormatChanged(Format newFormat) {
mPlayerEventDispatcher.onAudioInputFormatChanged(newFormat);
}
}

View File

@@ -0,0 +1,659 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
package org.mozilla.gecko.media;
import android.content.Context;
import android.net.Uri;
import android.os.Handler;
import android.text.TextUtils;
import android.util.Log;
import android.view.Surface;
import com.google.android.exoplayer2.C;
import com.google.android.exoplayer2.ExoPlaybackException;
import com.google.android.exoplayer2.ExoPlayer;
import com.google.android.exoplayer2.ExoPlayerFactory;
import com.google.android.exoplayer2.Format;
import com.google.android.exoplayer2.PlaybackParameters;
import com.google.android.exoplayer2.RendererCapabilities;
import com.google.android.exoplayer2.Timeline;
import com.google.android.exoplayer2.decoder.DecoderCounters;
import com.google.android.exoplayer2.mediacodec.MediaCodecSelector;
import com.google.android.exoplayer2.source.MediaSource;
import com.google.android.exoplayer2.source.TrackGroup;
import com.google.android.exoplayer2.source.TrackGroupArray;
import com.google.android.exoplayer2.source.hls.HlsMediaSource;
import com.google.android.exoplayer2.trackselection.AdaptiveTrackSelection;
import com.google.android.exoplayer2.trackselection.DefaultTrackSelector;
import com.google.android.exoplayer2.trackselection.MappingTrackSelector.MappedTrackInfo;
import com.google.android.exoplayer2.trackselection.TrackSelection;
import com.google.android.exoplayer2.trackselection.TrackSelectionArray;
import com.google.android.exoplayer2.upstream.DataSource;
import com.google.android.exoplayer2.upstream.DefaultBandwidthMeter;
import com.google.android.exoplayer2.upstream.DefaultDataSourceFactory;
import com.google.android.exoplayer2.upstream.DefaultHttpDataSourceFactory;
import com.google.android.exoplayer2.upstream.HttpDataSource;
import com.google.android.exoplayer2.util.Util;
import org.mozilla.gecko.AppConstants;
import org.mozilla.gecko.GeckoAppShell;
import java.util.concurrent.ConcurrentLinkedQueue;
public class GeckoHlsPlayer implements ExoPlayer.EventListener {
private static final String LOGTAG = "GeckoHlsPlayer";
private static final DefaultBandwidthMeter BANDWIDTH_METER = new DefaultBandwidthMeter();
private static final int MAX_TIMELINE_ITEM_LINES = 3;
private static boolean DEBUG = false;
private DataSource.Factory mMediaDataSourceFactory;
private Handler mMainHandler;
private ExoPlayer mPlayer;
private GeckoHlsRendererBase[] mRenderers;
private DefaultTrackSelector mTrackSelector;
private MediaSource mMediaSource;
private ComponentListener mComponentListener;
private ComponentEventDispatcher mComponentEventDispatcher;
private boolean mIsTimelineStatic = false;
private long mDurationUs;
private GeckoHlsVideoRenderer mVRenderer = null;
private GeckoHlsAudioRenderer mARenderer = null;
// Controls whether we want only video, only audio, or both tracks from the bitstream.
private class RendererController {
private final boolean mEnableV;
private final boolean mEnableA;
RendererController(boolean enableVideoRenderer, boolean enableAudioRenderer) {
this.mEnableV = enableVideoRenderer;
this.mEnableA = enableAudioRenderer;
}
boolean isVideoRendererEnabled() { return mEnableV; }
boolean isAudioRendererEnabled() { return mEnableA; }
}
private RendererController mRendererController = new RendererController(true, true);
// Provide statistical information of tracks.
private class HlsMediaTracksInfo {
private int mNumVideoTracks = 0;
private int mNumAudioTracks = 0;
private boolean mVideoInfoUpdated = false;
private boolean mAudioInfoUpdated = false;
HlsMediaTracksInfo(int numVideoTracks, int numAudioTracks) {
this.mNumVideoTracks = numVideoTracks;
this.mNumAudioTracks = numAudioTracks;
}
public boolean hasVideo() { return mNumVideoTracks > 0; }
public boolean hasAudio() { return mNumAudioTracks > 0; }
public int getNumOfVideoTracks() { return mNumVideoTracks; }
public int getNumOfAudioTracks() { return mNumAudioTracks; }
public void onVideoInfoUpdated() { mVideoInfoUpdated = true; }
public void onAudioInfoUpdated() { mAudioInfoUpdated = true; }
public boolean videoReady() {
return hasVideo() ? mVideoInfoUpdated : true;
}
public boolean audioReady() {
return hasAudio() ? mAudioInfoUpdated : true;
}
}
private HlsMediaTracksInfo mTracksInfo = null;
private boolean mIsPlayerInitDone = false;
private boolean mIsDemuxerInitDone = false;
private DemuxerCallbacks mDemuxerCallbacks;
private ResourceCallbacks mResourceCallbacks;
public enum TrackType {
UNDEFINED,
AUDIO,
VIDEO,
TEXT,
}
public enum ResourceError {
BASE(-100),
UNKNOWN(-101),
PLAYER(-102),
UNSUPPORTED(-103);
private int mNumVal;
private ResourceError(int numVal) {
mNumVal = numVal;
}
public int code() {
return mNumVal;
}
}
public enum DemuxerError {
BASE(-200),
UNKNOWN(-201),
PLAYER(-202),
UNSUPPORTED(-203);
private int mNumVal;
private DemuxerError(int numVal) {
mNumVal = numVal;
}
public int code() {
return mNumVal;
}
}
public interface DemuxerCallbacks {
void onInitialized(boolean hasAudio, boolean hasVideo);
void onError(int errorCode);
}
public interface ResourceCallbacks {
void onDataArrived();
void onError(int errorCode);
}
private static void assertTrue(boolean condition) {
if (DEBUG && !condition) {
throw new AssertionError("Expected condition to be true");
}
}
public void checkInitDone() {
assertTrue(mDemuxerCallbacks != null);
assertTrue(mTracksInfo != null);
if (mIsDemuxerInitDone) {
return;
}
if (DEBUG) {
Log.d(LOGTAG, "[checkInitDone] VReady:" + mTracksInfo.videoReady() +
",AReady:" + mTracksInfo.audioReady() +
",hasV:" + mTracksInfo.hasVideo() +
",hasA:" + mTracksInfo.hasAudio());
}
if (mTracksInfo.videoReady() && mTracksInfo.audioReady()) {
mDemuxerCallbacks.onInitialized(mTracksInfo.hasAudio(), mTracksInfo.hasVideo());
mIsDemuxerInitDone = true;
}
}
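// For example: in an audio-only stream hasVideo() is false, so videoReady() is
// trivially true and onInitialized() fires as soon as the first audio format is
// reported through onAudioInputFormatChanged().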
public final class ComponentEventDispatcher {
public void onDataArrived() {
assertTrue(mMainHandler != null);
assertTrue(mComponentListener != null);
if (!mIsPlayerInitDone) {
return;
}
if (mMainHandler != null && mComponentListener != null) {
mMainHandler.post(new Runnable() {
@Override
public void run() {
mComponentListener.onDataArrived();
}
});
}
}
public void onVideoInputFormatChanged(final Format format) {
assertTrue(mMainHandler != null);
assertTrue(mComponentListener != null);
if (!mIsPlayerInitDone) {
return;
}
if (mMainHandler != null && mComponentListener != null) {
mMainHandler.post(new Runnable() {
@Override
public void run() {
mComponentListener.onVideoInputFormatChanged(format);
}
});
}
}
public void onAudioInputFormatChanged(final Format format) {
assertTrue(mMainHandler != null);
assertTrue(mComponentListener != null);
if (!mIsPlayerInitDone) {
return;
}
if (mMainHandler != null && mComponentListener != null) {
mMainHandler.post(new Runnable() {
@Override
public void run() {
mComponentListener.onAudioInputFormatChanged(format);
}
});
}
}
}
public final class ComponentListener {
// General purpose implementation
public void onDataArrived() {
assertTrue(mResourceCallbacks != null);
Log.d(LOGTAG, "[CB][onDataArrived]");
mResourceCallbacks.onDataArrived();
}
public void onVideoInputFormatChanged(Format format) {
assertTrue(mTracksInfo != null);
if (DEBUG) {
Log.d(LOGTAG, "[CB] onVideoInputFormatChanged [" + format + "]");
Log.d(LOGTAG, "[CB] SampleMIMEType [" +
format.sampleMimeType + "], ContainerMIMEType [" +
format.containerMimeType + "]");
}
mTracksInfo.onVideoInfoUpdated();
checkInitDone();
}
public void onAudioInputFormatChanged(Format format) {
assertTrue(mTracksInfo != null);
if (DEBUG) { Log.d(LOGTAG, "[CB] onAudioInputFormatChanged [" + format + "]"); }
mTracksInfo.onAudioInfoUpdated();
checkInitDone();
}
}
public DataSource.Factory buildDataSourceFactory(Context ctx, DefaultBandwidthMeter bandwidthMeter) {
return new DefaultDataSourceFactory(ctx, bandwidthMeter,
buildHttpDataSourceFactory(bandwidthMeter));
}
public HttpDataSource.Factory buildHttpDataSourceFactory(DefaultBandwidthMeter bandwidthMeter) {
return new DefaultHttpDataSourceFactory(AppConstants.USER_AGENT_FENNEC_MOBILE, bandwidthMeter);
}
private MediaSource buildMediaSource(Uri uri, String overrideExtension) {
if (DEBUG) { Log.d(LOGTAG, "buildMediaSource uri[" + uri + "]" + ", overridedExt[" + overrideExtension + "]"); }
int type = Util.inferContentType(TextUtils.isEmpty(overrideExtension)
? uri.getLastPathSegment()
: "." + overrideExtension);
switch (type) {
case C.TYPE_HLS:
return new HlsMediaSource(uri, mMediaDataSourceFactory, mMainHandler, null);
default:
mResourceCallbacks.onError(ResourceError.UNSUPPORTED.code());
throw new IllegalArgumentException("Unsupported type: " + type);
}
}
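// For example: a URI such as "https://example.com/live/playlist.m3u8" (placeholder)
// is inferred as C.TYPE_HLS from its ".m3u8" extension and wrapped in an
// HlsMediaSource; any other inferred type ends up in the default branch above.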
GeckoHlsPlayer() {
if (DEBUG) { Log.d(LOGTAG, " construct"); }
}
void addResourceWrapperCallbackListener(ResourceCallbacks callback) {
if (DEBUG) { Log.d(LOGTAG, " addResourceWrapperCallbackListener ..."); }
mResourceCallbacks = callback;
}
void addDemuxerWrapperCallbackListener(DemuxerCallbacks callback) {
if (DEBUG) { Log.d(LOGTAG, " addDemuxerWrapperCallbackListener ..."); }
mDemuxerCallbacks = callback;
}
@Override
public void onLoadingChanged(boolean isLoading) {
if (DEBUG) { Log.d(LOGTAG, "loading [" + isLoading + "]"); }
if (!isLoading) {
// To update buffered position.
mComponentEventDispatcher.onDataArrived();
}
}
@Override
public void onPlayerStateChanged(boolean playWhenReady, int state) {
if (DEBUG) { Log.d(LOGTAG, "state [" + playWhenReady + ", " + getStateString(state) + "]"); }
if (state == ExoPlayer.STATE_READY) {
mPlayer.setPlayWhenReady(true);
}
}
@Override
public void onPositionDiscontinuity() {
if (DEBUG) { Log.d(LOGTAG, "positionDiscontinuity"); }
}
@Override
public void onPlaybackParametersChanged(PlaybackParameters playbackParameters) {
if (DEBUG) {
Log.d(LOGTAG, "playbackParameters " +
String.format("[speed=%.2f, pitch=%.2f]", playbackParameters.speed, playbackParameters.pitch));
}
}
@Override
public void onPlayerError(ExoPlaybackException e) {
if (DEBUG) { Log.e(LOGTAG, "playerFailed" , e); }
if (mResourceCallbacks != null) {
mResourceCallbacks.onError(ResourceError.PLAYER.code());
}
if (mDemuxerCallbacks != null) {
mDemuxerCallbacks.onError(DemuxerError.PLAYER.code());
}
}
@Override
public synchronized void onTracksChanged(TrackGroupArray ignored, TrackSelectionArray trackSelections) {
if (DEBUG) {
Log.d(LOGTAG, "onTracksChanged : TGA[" + ignored +
"], TSA[" + trackSelections + "]");
MappedTrackInfo mappedTrackInfo = mTrackSelector.getCurrentMappedTrackInfo();
if (mappedTrackInfo == null) {
Log.d(LOGTAG, "Tracks []");
return;
}
Log.d(LOGTAG, "Tracks [");
// Log tracks associated to renderers.
for (int rendererIndex = 0; rendererIndex < mappedTrackInfo.length; rendererIndex++) {
TrackGroupArray rendererTrackGroups = mappedTrackInfo.getTrackGroups(rendererIndex);
TrackSelection trackSelection = trackSelections.get(rendererIndex);
if (rendererTrackGroups.length > 0) {
Log.d(LOGTAG, " Renderer:" + rendererIndex + " [");
for (int groupIndex = 0; groupIndex < rendererTrackGroups.length; groupIndex++) {
TrackGroup trackGroup = rendererTrackGroups.get(groupIndex);
String adaptiveSupport = getAdaptiveSupportString(trackGroup.length,
mappedTrackInfo.getAdaptiveSupport(rendererIndex, groupIndex, false));
Log.d(LOGTAG, " Group:" + groupIndex + ", adaptive_supported=" + adaptiveSupport + " [");
for (int trackIndex = 0; trackIndex < trackGroup.length; trackIndex++) {
String status = getTrackStatusString(trackSelection, trackGroup, trackIndex);
String formatSupport = getFormatSupportString(
mappedTrackInfo.getTrackFormatSupport(rendererIndex, groupIndex, trackIndex));
Log.d(LOGTAG, " " + status + " Track:" + trackIndex +
", " + Format.toLogString(trackGroup.getFormat(trackIndex)) +
", supported=" + formatSupport);
}
Log.d(LOGTAG, " ]");
}
Log.d(LOGTAG, " ]");
}
}
// Log tracks not associated with a renderer.
TrackGroupArray unassociatedTrackGroups = mappedTrackInfo.getUnassociatedTrackGroups();
if (unassociatedTrackGroups.length > 0) {
Log.d(LOGTAG, " Renderer:None [");
for (int groupIndex = 0; groupIndex < unassociatedTrackGroups.length; groupIndex++) {
Log.d(LOGTAG, " Group:" + groupIndex + " [");
TrackGroup trackGroup = unassociatedTrackGroups.get(groupIndex);
for (int trackIndex = 0; trackIndex < trackGroup.length; trackIndex++) {
String status = getTrackStatusString(false);
String formatSupport = getFormatSupportString(
RendererCapabilities.FORMAT_UNSUPPORTED_TYPE);
Log.d(LOGTAG, " " + status + " Track:" + trackIndex +
", " + Format.toLogString(trackGroup.getFormat(trackIndex)) +
", supported=" + formatSupport);
}
Log.d(LOGTAG, " ]");
}
Log.d(LOGTAG, " ]");
}
Log.d(LOGTAG, "]");
}
mTracksInfo = null;
int numVideoTracks = 0;
int numAudioTracks = 0;
for (int j = 0; j < ignored.length; j++) {
TrackGroup tg = ignored.get(j);
for (int i = 0; i < tg.length; i++) {
Format fmt = tg.getFormat(i);
if (fmt.sampleMimeType != null) {
if (mRendererController.isVideoRendererEnabled() &&
fmt.sampleMimeType.startsWith("video")) {
numVideoTracks++;
} else if (mRendererController.isAudioRendererEnabled() &&
fmt.sampleMimeType.startsWith("audio")) {
numAudioTracks++;
}
}
}
}
mTracksInfo = new HlsMediaTracksInfo(numVideoTracks, numAudioTracks);
}
@Override
public void onTimelineChanged(Timeline timeline, Object manifest) {
// For now, gecko uses the ExoPlayer.getDuration() interface, so here we
// create local 'window' & 'period' variables to obtain the dynamic
// duration.
// See http://google.github.io/ExoPlayer/doc/reference/com/google/android/exoplayer2/Timeline.html
// for further information.
Timeline.Window window = new Timeline.Window();
mIsTimelineStatic = !timeline.isEmpty()
&& !timeline.getWindow(timeline.getWindowCount() - 1, window).isDynamic;
int periodCount = timeline.getPeriodCount();
int windowCount = timeline.getWindowCount();
if (DEBUG) { Log.d(LOGTAG, "sourceInfo [periodCount=" + periodCount + ", windowCount=" + windowCount); }
Timeline.Period period = new Timeline.Period();
for (int i = 0; i < Math.min(periodCount, MAX_TIMELINE_ITEM_LINES); i++) {
timeline.getPeriod(i, period);
if (mDurationUs < period.getDurationUs()) {
mDurationUs = period.getDurationUs();
}
}
for (int i = 0; i < Math.min(windowCount, MAX_TIMELINE_ITEM_LINES); i++) {
timeline.getWindow(i, window);
if (mDurationUs < window.getDurationUs()) {
mDurationUs = window.getDurationUs();
}
}
// TODO : Need to check whether the duration from player.getDuration() is
// different from the one calculated from multiple timelines/windows.
if (DEBUG) {
Log.d(LOGTAG, "Media duration (from Timeline) = " + mDurationUs +
"(us)" + " player.getDuration() = " + mPlayer.getDuration() +
"(ms)");
}
}
private static String getStateString(int state) {
switch (state) {
case ExoPlayer.STATE_BUFFERING:
return "B";
case ExoPlayer.STATE_ENDED:
return "E";
case ExoPlayer.STATE_IDLE:
return "I";
case ExoPlayer.STATE_READY:
return "R";
default:
return "?";
}
}
private static String getFormatSupportString(int formatSupport) {
switch (formatSupport) {
case RendererCapabilities.FORMAT_HANDLED:
return "YES";
case RendererCapabilities.FORMAT_EXCEEDS_CAPABILITIES:
return "NO_EXCEEDS_CAPABILITIES";
case RendererCapabilities.FORMAT_UNSUPPORTED_SUBTYPE:
return "NO_UNSUPPORTED_TYPE";
case RendererCapabilities.FORMAT_UNSUPPORTED_TYPE:
return "NO";
default:
return "?";
}
}
private static String getAdaptiveSupportString(int trackCount, int adaptiveSupport) {
if (trackCount < 2) {
return "N/A";
}
switch (adaptiveSupport) {
case RendererCapabilities.ADAPTIVE_SEAMLESS:
return "YES";
case RendererCapabilities.ADAPTIVE_NOT_SEAMLESS:
return "YES_NOT_SEAMLESS";
case RendererCapabilities.ADAPTIVE_NOT_SUPPORTED:
return "NO";
default:
return "?";
}
}
private static String getTrackStatusString(TrackSelection selection, TrackGroup group,
int trackIndex) {
return getTrackStatusString(selection != null && selection.getTrackGroup() == group
&& selection.indexOf(trackIndex) != C.INDEX_UNSET);
}
private static String getTrackStatusString(boolean enabled) {
return enabled ? "[X]" : "[ ]";
}
// =======================================================================
// API for GeckoHlsResourceWrapper
// =======================================================================
synchronized void init(String url) {
if (DEBUG) { Log.d(LOGTAG, " init"); }
assertTrue(mResourceCallbacks != null);
if (mIsPlayerInitDone) {
return;
}
Context ctx = GeckoAppShell.getApplicationContext();
mComponentListener = new ComponentListener();
mComponentEventDispatcher = new ComponentEventDispatcher();
mMainHandler = new Handler();
mDurationUs = 0;
// Prepare trackSelector
TrackSelection.Factory videoTrackSelectionFactory =
new AdaptiveTrackSelection.Factory(BANDWIDTH_METER);
mTrackSelector = new DefaultTrackSelector(videoTrackSelectionFactory);
// Prepare customized renderer
mRenderers = new GeckoHlsRendererBase[2];
mVRenderer = new GeckoHlsVideoRenderer(mComponentEventDispatcher);
mARenderer = new GeckoHlsAudioRenderer(mComponentEventDispatcher);
mRenderers[0] = mVRenderer;
mRenderers[1] = mARenderer;
// Create ExoPlayer instance with specific components.
mPlayer = ExoPlayerFactory.newInstance(mRenderers, mTrackSelector);
mPlayer.addListener(this);
Uri uri = Uri.parse(url);
mMediaDataSourceFactory = buildDataSourceFactory(ctx, BANDWIDTH_METER);
mMediaSource = buildMediaSource(uri, null);
mPlayer.prepare(mMediaSource);
mIsPlayerInitDone = true;
}
public boolean isLiveStream() {
return !mIsTimelineStatic;
}
// =======================================================================
// API for GeckoHlsDemuxerWrapper
// =======================================================================
public ConcurrentLinkedQueue<GeckoHlsSample> getVideoSamples(int number) {
return mVRenderer != null ? mVRenderer.getQueuedSamples(number) :
new ConcurrentLinkedQueue<GeckoHlsSample>();
}
public ConcurrentLinkedQueue<GeckoHlsSample> getAudioSamples(int number) {
return mARenderer != null ? mARenderer.getQueuedSamples(number) :
new ConcurrentLinkedQueue<GeckoHlsSample>();
}
public long getDuration() {
assertTrue(mPlayer != null);
// Value returned by getDuration() is in milliseconds.
long duration = mPlayer.getDuration() * 1000;
if (DEBUG) { Log.d(LOGTAG, "getDuration : " + duration + "(Us)"); }
return duration;
}
public long getBufferedPosition() {
assertTrue(mPlayer != null);
// Value returned by getBufferedPosition() is in milliseconds.
long bufferedPos = mPlayer.getBufferedPosition() * 1000;
if (DEBUG) { Log.d(LOGTAG, "getBufferedPosition : " + bufferedPos + "(Us)"); }
return bufferedPos;
}
public synchronized int getNumberOfTracks(TrackType trackType) {
if (DEBUG) { Log.d(LOGTAG, "getNumberOfTracks"); }
assertTrue(mTracksInfo != null);
if (trackType == TrackType.VIDEO) {
return mTracksInfo.getNumOfVideoTracks();
} else if (trackType == TrackType.AUDIO) {
return mTracksInfo.getNumOfAudioTracks();
}
return 0;
}
public Format getVideoTrackFormat(int index) {
if (DEBUG) { Log.d(LOGTAG, "getVideoTrackFormat"); }
assertTrue(mVRenderer != null);
assertTrue(mTracksInfo != null);
return mTracksInfo.hasVideo() ? mVRenderer.getFormat(index) : null;
}
public Format getAudioTrackFormat(int index) {
if (DEBUG) { Log.d(LOGTAG, "getAudioTrackFormat"); }
assertTrue(mARenderer != null);
assertTrue(mTracksInfo != null);
return mTracksInfo.hasAudio() ? mARenderer.getFormat(index) : null;
}
public boolean seek(long positionUs) {
// positionUs : microseconds.
// NOTE : 1) It's not possible to seek media by track type via the ExoPlayer interface.
// 2) positionUs is the sample PTS from MFR; we need to re-adjust it
// for ExoPlayer by subtracting the sample start time.
// 3) The time unit for ExoPlayer.seekTo() is milliseconds.
try {
// TODO : Gather Timeline Period / Window information to develop
// complete timeline, and seekTime should be inside the duration.
Long startTime = Long.MAX_VALUE;
for (GeckoHlsRendererBase r : mRenderers) {
if (r == mVRenderer && mRendererController.isVideoRendererEnabled() ||
r == mARenderer && mRendererController.isAudioRendererEnabled()) {
// Find the min value of the start time
startTime = Math.min(startTime, r.getFirstSamplePTS());
}
}
if (DEBUG) {
Log.d(LOGTAG, "seeking : " + positionUs / 1000 +
" (ms); startTime : " + startTime / 1000 + " (ms)");
}
assertTrue(startTime != Long.MAX_VALUE);
mPlayer.seekTo(positionUs / 1000 - startTime / 1000);
} catch (Exception e) {
mDemuxerCallbacks.onError(DemuxerError.UNKNOWN.code());
return false;
}
return true;
}
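// Worked example: if MFR requests positionUs = 5_000_000 (us) and the earliest
// queued sample PTS across the enabled renderers is 1_400_000 (us), the call
// above becomes mPlayer.seekTo(5000 - 1400), i.e. a seek to 3600 ms.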
public long getNextKeyFrameTime() {
long nextKeyFrameTime = mVRenderer != null
? mVRenderer.getNextKeyFrameTime()
: Long.MAX_VALUE;
return nextKeyFrameTime;
}
public void release() {
if (DEBUG) { Log.d(LOGTAG, "releasing ..."); }
if (mPlayer != null) {
mPlayer.removeListener(this);
mPlayer.stop();
mPlayer.release();
mVRenderer = null;
mARenderer = null;
mPlayer = null;
}
mDemuxerCallbacks = null;
mResourceCallbacks = null;
mIsPlayerInitDone = false;
mIsDemuxerInitDone = false;
}
}
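For orientation, a minimal, hypothetical sketch of how a same-package caller (in the spirit of the GeckoHlsResourceWrapper / GeckoHlsDemuxerWrapper named above, which are not part of this hunk) would drive GeckoHlsPlayer; the URL and callback bodies are placeholders, and this would only run inside Gecko on Android:

package org.mozilla.gecko.media;

import com.google.android.exoplayer2.Format;

final class GeckoHlsPlayerUsageSketch {
    static GeckoHlsPlayer startPlayback(String url) {
        final GeckoHlsPlayer player = new GeckoHlsPlayer();
        // init() asserts that resource callbacks are present, so register them first.
        player.addResourceWrapperCallbackListener(new GeckoHlsPlayer.ResourceCallbacks() {
            @Override public void onDataArrived() { /* buffered data is available */ }
            @Override public void onError(int errorCode) { /* map ResourceError code */ }
        });
        // Demuxer callbacks fire once both audio and video track info are known.
        player.addDemuxerWrapperCallbackListener(new GeckoHlsPlayer.DemuxerCallbacks() {
            @Override public void onInitialized(boolean hasAudio, boolean hasVideo) {
                Format video = hasVideo ? player.getVideoTrackFormat(0) : null;
                Format audio = hasAudio ? player.getAudioTrackFormat(0) : null;
                // Hand the formats over to the native demuxer here.
            }
            @Override public void onError(int errorCode) { /* map DemuxerError code */ }
        });
        player.init(url); // e.g. "https://example.com/stream.m3u8" (placeholder)
        return player;
    }
}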

View File

@@ -0,0 +1,300 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
package org.mozilla.gecko.media;
import android.util.Log;
import com.google.android.exoplayer2.BaseRenderer;
import com.google.android.exoplayer2.C;
import com.google.android.exoplayer2.decoder.DecoderInputBuffer;
import com.google.android.exoplayer2.Format;
import com.google.android.exoplayer2.FormatHolder;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.concurrent.ConcurrentLinkedQueue;
import java.util.Iterator;
public abstract class GeckoHlsRendererBase extends BaseRenderer {
protected static final int QUEUED_INPUT_SAMPLE_DURATION_THRESHOLD = 1000000; //1sec
protected final FormatHolder mFormatHolder = new FormatHolder();
/*
* DEBUG/LOGTAG will be set in the two subclasses GeckoHlsAudioRenderer and
* GeckoHlsVideoRenderer, and we still want to log messages in the base class
* GeckoHlsRendererBase, so neither 'static' nor 'final' is applied to them.
*/
protected boolean DEBUG;
protected String LOGTAG;
// Notify GeckoHlsPlayer about renderer's status, i.e. data has arrived.
protected GeckoHlsPlayer.ComponentEventDispatcher mPlayerEventDispatcher;
protected ConcurrentLinkedQueue<GeckoHlsSample> mDemuxedInputSamples = new ConcurrentLinkedQueue<>();
protected ByteBuffer mInputBuffer = null;
protected ArrayList<Format> mFormats = new ArrayList<Format>();
protected boolean mInitialized = false;
protected boolean mWaitingForData = true;
protected boolean mInputStreamEnded = false;
protected long mFirstSampleStartTime = Long.MIN_VALUE;
protected abstract void createInputBuffer();
protected abstract void handleReconfiguration(DecoderInputBuffer bufferForRead);
protected abstract void handleFormatRead(DecoderInputBuffer bufferForRead);
protected abstract void handleEndOfStream(DecoderInputBuffer bufferForRead);
protected abstract void handleSamplePreparation(DecoderInputBuffer bufferForRead);
protected abstract void resetRenderer();
protected abstract boolean clearInputSamplesQueue();
protected abstract void notifyPlayerInputFormatChanged(Format newFormat);
protected void assertTrue(boolean condition) {
if (DEBUG && !condition) {
throw new AssertionError("Expected condition to be true");
}
}
public GeckoHlsRendererBase(int trackType, GeckoHlsPlayer.ComponentEventDispatcher eventDispatcher) {
super(trackType);
mPlayerEventDispatcher = eventDispatcher;
}
private boolean isQueuedEnoughData() {
if (mDemuxedInputSamples.isEmpty()) {
return false;
}
Iterator<GeckoHlsSample> iter = mDemuxedInputSamples.iterator();
long firstPTS = 0;
if (iter.hasNext()) {
GeckoHlsSample sample = iter.next();
firstPTS = sample.info.presentationTimeUs;
}
long lastPTS = firstPTS;
while (iter.hasNext()) {
GeckoHlsSample sample = iter.next();
lastPTS = sample.info.presentationTimeUs;
}
return Math.abs(lastPTS - firstPTS) > QUEUED_INPUT_SAMPLE_DURATION_THRESHOLD;
}
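// Worked example: with queued PTS values 0, 33_333 and 1_033_333 (us), the span
// between the first and last sample is 1_033_333 us, which exceeds
// QUEUED_INPUT_SAMPLE_DURATION_THRESHOLD (1_000_000 us), so the caller
// (feedInputBuffersQueue below) stops pulling more data for now.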
public Format getFormat(int index) {
assertTrue(index >= 0);
Format fmt = index < mFormats.size() ? mFormats.get(index) : null;
if (DEBUG) { Log.d(LOGTAG, "getFormat : index = " + index + ", format : " + fmt); }
return fmt;
}
public long getFirstSamplePTS() { return mFirstSampleStartTime; }
public synchronized ConcurrentLinkedQueue<GeckoHlsSample> getQueuedSamples(int number) {
ConcurrentLinkedQueue<GeckoHlsSample> samples =
new ConcurrentLinkedQueue<GeckoHlsSample>();
int queuedSize = mDemuxedInputSamples.size();
for (int i = 0; i < queuedSize; i++) {
if (i >= number) {
break;
}
GeckoHlsSample sample = mDemuxedInputSamples.poll();
samples.offer(sample);
}
if (samples.isEmpty()) {
if (DEBUG) { Log.d(LOGTAG, "getQueuedSamples isEmpty, mWaitingForData = true !"); }
mWaitingForData = true;
} else if (mFirstSampleStartTime == Long.MIN_VALUE) {
mFirstSampleStartTime = samples.peek().info.presentationTimeUs;
if (DEBUG) { Log.d(LOGTAG, "mFirstSampleStartTime = " + mFirstSampleStartTime); }
}
return samples;
}
protected void handleDrmInitChanged(Format oldFormat, Format newFormat) {
Object oldDrmInit = oldFormat == null ? null : oldFormat.drmInitData;
Object newDrmInit = newFormat.drmInitData;
// TODO: Notify MFR whether the content is encrypted or not.
if (newDrmInit != oldDrmInit) {
if (newDrmInit != null) {
} else {
}
}
}
protected boolean canReconfigure(Format oldFormat, Format newFormat) {
// Referring to ExoPlayer's MediaCodecBaseRenderer, the default is set
// to false. Only override it in video renderer subclass.
return false;
}
protected void prepareReconfiguration() {
// Referring to ExoPlayer's MediaCodec related renderers, only video
// renderer handles this.
}
protected void updateCSDInfo(Format format) {
// do nothing.
}
protected void onInputFormatChanged(Format newFormat) {
Format oldFormat;
try {
oldFormat = mFormats.get(mFormats.size() - 1);
} catch (IndexOutOfBoundsException e) {
oldFormat = null;
}
if (DEBUG) {
Log.d(LOGTAG, "[onInputFormatChanged] old : " + oldFormat +
" => new : " + newFormat);
}
mFormats.add(newFormat);
handleDrmInitChanged(oldFormat, newFormat);
if (mInitialized && canReconfigure(oldFormat, newFormat)) {
prepareReconfiguration();
} else {
resetRenderer();
maybeInitRenderer();
}
updateCSDInfo(newFormat);
notifyPlayerInputFormatChanged(newFormat);
}
protected void maybeInitRenderer() {
if (mInitialized || mFormats.size() == 0) {
return;
}
if (DEBUG) { Log.d(LOGTAG, "Initializing ... "); }
createInputBuffer();
mInitialized = true;
}
/*
* This is where we get demuxed data from HlsMediaSource (ExoPlayer).
* The data will then be converted to GeckoHlsSample and delivered to
* GeckoHlsDemuxerWrapper for further use.
* If the return value is true, a GeckoHlsSample has been queued
* successfully and we can try to feed more samples into the queue.
* If the return value is false, we might have encountered one of the
* following situations: 1) not initialized 2) input stream has ended
* 3) queue is full 4) format changed 5) an exception happened.
*/
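// In practice this is driven by render() further below, which simply loops
// while (feedInputBuffersQueue()) { } until one of the stop conditions hits.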
protected synchronized boolean feedInputBuffersQueue() {
if (!mInitialized || mInputStreamEnded || isQueuedEnoughData()) {
// Need to reinitialize the renderer or the input stream has ended
// or we just reached the maximum queue size.
return false;
}
DecoderInputBuffer bufferForRead =
new DecoderInputBuffer(DecoderInputBuffer.BUFFER_REPLACEMENT_MODE_NORMAL);
bufferForRead.data = mInputBuffer;
if (bufferForRead.data != null) {
bufferForRead.clear();
}
handleReconfiguration(bufferForRead);
// Read data from HlsMediaSource
int result = C.RESULT_NOTHING_READ;
try {
result = readSource(mFormatHolder, bufferForRead, false);
} catch (Exception e) {
Log.e(LOGTAG, "[feedInput] Exception when readSource :", e);
return false;
}
if (result == C.RESULT_NOTHING_READ) {
return false;
}
if (result == C.RESULT_FORMAT_READ) {
handleFormatRead(bufferForRead);
return true;
}
// We've read a buffer.
if (bufferForRead.isEndOfStream()) {
if (DEBUG) { Log.d(LOGTAG, "Now we're at the End Of Stream."); }
handleEndOfStream(bufferForRead);
return false;
}
bufferForRead.flip();
handleSamplePreparation(bufferForRead);
maybeNotifyDataArrived();
return true;
}
private void maybeNotifyDataArrived() {
if (mWaitingForData && isQueuedEnoughData()) {
if (DEBUG) { Log.d(LOGTAG, "onDataArrived"); }
mPlayerEventDispatcher.onDataArrived();
mWaitingForData = false;
}
}
private void readFormat() {
int result = readSource(mFormatHolder, null, true);
if (result == C.RESULT_FORMAT_READ) {
onInputFormatChanged(mFormatHolder.format);
}
}
@Override
protected void onEnabled(boolean joining) {
// Do nothing.
}
@Override
protected void onDisabled() {
mFormats.clear();
resetRenderer();
}
@Override
public boolean isReady() {
return mFormats.size() != 0;
}
@Override
public boolean isEnded() {
return mInputStreamEnded;
}
@Override
protected synchronized void onPositionReset(long positionUs, boolean joining) {
if (DEBUG) { Log.d(LOGTAG, "onPositionReset : positionUs = " + positionUs); }
mInputStreamEnded = false;
if (mInitialized) {
clearInputSamplesQueue();
}
}
/*
* This is called by ExoPlayerImplInternal.java.
* ExoPlayer checks the status of the renderer, i.e. isReady() / isEnded(),
* and calls renderer.render(), passing in its wall-clock time.
*/
@Override
public void render(long positionUs, long elapsedRealtimeUs) {
if (DEBUG) {
Log.d(LOGTAG, "positionUs = " + positionUs +
", mInputStreamEnded = " + mInputStreamEnded);
}
if (mInputStreamEnded) {
return;
}
if (mFormats.size() == 0) {
readFormat();
}
maybeInitRenderer();
while (feedInputBuffersQueue()) {
// Do nothing
}
}
}

View File

@@ -0,0 +1,86 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
package org.mozilla.gecko.media;
import android.media.MediaCodec;
import android.media.MediaCodec.BufferInfo;
import android.media.MediaCodec.CryptoInfo;
import org.mozilla.gecko.annotation.WrapForJNI;
import java.io.IOException;
import java.nio.ByteBuffer;
public final class GeckoHlsSample {
public static final GeckoHlsSample EOS;
static {
BufferInfo eosInfo = new BufferInfo();
eosInfo.set(0, 0, Long.MIN_VALUE, MediaCodec.BUFFER_FLAG_END_OF_STREAM);
EOS = new GeckoHlsSample(null, eosInfo, null, 0);
}
// Indicates the index of the format used by this sample.
@WrapForJNI
final public int formatIndex;
@WrapForJNI
public long duration;
@WrapForJNI
final public BufferInfo info;
@WrapForJNI
final public CryptoInfo cryptoInfo;
private ByteBuffer buffer = null;
@WrapForJNI
public void writeToByteBuffer(ByteBuffer dest) throws IOException {
if (buffer != null && dest != null && info.size > 0) {
dest.put(buffer);
}
}
@WrapForJNI
public boolean isEOS() {
return (info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0;
}
@WrapForJNI
public boolean isKeyFrame() {
return (info.flags & MediaCodec.BUFFER_FLAG_KEY_FRAME) != 0;
}
public static GeckoHlsSample create(ByteBuffer src, BufferInfo info, CryptoInfo cryptoInfo,
int formatIndex) {
return new GeckoHlsSample(src, info, cryptoInfo, formatIndex);
}
private GeckoHlsSample(ByteBuffer buffer, BufferInfo info, CryptoInfo cryptoInfo,
int formatIndex) {
this.formatIndex = formatIndex;
duration = Long.MAX_VALUE;
this.buffer = buffer;
this.info = info;
this.cryptoInfo = cryptoInfo;
}
@Override
public String toString() {
if (isEOS()) {
return "EOS GeckoHlsSample";
}
StringBuilder str = new StringBuilder();
str.append("{ info=").
append("{ offset=").append(info.offset).
append(", size=").append(info.size).
append(", pts=").append(info.presentationTimeUs).
append(", duration=").append(duration).
append(", flags=").append(Integer.toHexString(info.flags)).append(" }").
append(" }");
return str.toString();
}
}
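A brief sketch (sizes and timestamps are made up) showing how a sample is created and its payload copied out, using only the API above plus android.media.MediaCodec.BufferInfo:

import android.media.MediaCodec;
import android.media.MediaCodec.BufferInfo;
import java.io.IOException;
import java.nio.ByteBuffer;
import org.mozilla.gecko.media.GeckoHlsSample;

final class GeckoHlsSampleSketch {
    static void roundTrip() throws IOException {
        byte[] payload = { 0x00, 0x00, 0x00, 0x01 };      // hypothetical payload bytes
        BufferInfo info = new BufferInfo();
        info.set(0, payload.length, 33_000L, MediaCodec.BUFFER_FLAG_KEY_FRAME);
        GeckoHlsSample sample = GeckoHlsSample.create(ByteBuffer.wrap(payload), info, null, 0);
        ByteBuffer dest = ByteBuffer.allocate(info.size);
        sample.writeToByteBuffer(dest);                   // copies the payload into dest
    }
}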

View File

@@ -0,0 +1,463 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
package org.mozilla.gecko.media;
import android.media.MediaCodec;
import android.media.MediaCodec.BufferInfo;
import android.media.MediaCodec.CryptoInfo;
import android.os.Handler;
import android.util.Log;
import com.google.android.exoplayer2.C;
import com.google.android.exoplayer2.Format;
import com.google.android.exoplayer2.decoder.DecoderInputBuffer;
import com.google.android.exoplayer2.mediacodec.MediaCodecInfo;
import com.google.android.exoplayer2.mediacodec.MediaCodecSelector;
import com.google.android.exoplayer2.mediacodec.MediaCodecUtil;
import com.google.android.exoplayer2.RendererCapabilities;
import com.google.android.exoplayer2.util.MimeTypes;
import java.nio.ByteBuffer;
import java.util.concurrent.ConcurrentLinkedQueue;
import org.mozilla.gecko.AppConstants.Versions;
public class GeckoHlsVideoRenderer extends GeckoHlsRendererBase {
/*
* By configuring these states, initialization data is provided for
* ExoPlayer's HlsMediaSource to parse the HLS bitstream, which then provides
* samples starting with an Access Unit Delimiter including SPS/PPS for TS,
* and samples starting with an AUD but without SPS/PPS for FMP4.
*/
private enum RECONFIGURATION_STATE {
NONE,
WRITE_PENDING,
QUEUE_PENDING
}
private boolean mRendererReconfigured;
private RECONFIGURATION_STATE mRendererReconfigurationState = RECONFIGURATION_STATE.NONE;
// A list of the formats which may be included in the bitstream.
private Format[] mStreamFormats;
// The max width/height/inputBufferSize for specific codec format.
private CodecMaxValues mCodecMaxValues;
// A temporary queue for samples whose duration is not calculated yet.
private ConcurrentLinkedQueue<GeckoHlsSample> mDemuxedNoDurationSamples =
new ConcurrentLinkedQueue<>();
// Contains CSD-0 (SPS) / CSD-1 (PPS) information (in Annex B format) to be
// prepended to each keyframe. When the video format changes, this information
// changes accordingly.
private byte[] mCSDInfo = null;
public GeckoHlsVideoRenderer(GeckoHlsPlayer.ComponentEventDispatcher eventDispatcher) {
super(C.TRACK_TYPE_VIDEO, eventDispatcher);
assertTrue(Versions.feature16Plus);
LOGTAG = getClass().getSimpleName();
DEBUG = false;
}
@Override
public final int supportsMixedMimeTypeAdaptation() {
return ADAPTIVE_NOT_SEAMLESS;
}
@Override
public final int supportsFormat(Format format) {
/*
* FORMAT_EXCEEDS_CAPABILITIES : The Renderer is capable of rendering
* formats with the same mime type, but
* the properties of the format exceed
* the renderer's capability.
* FORMAT_UNSUPPORTED_SUBTYPE : The Renderer is a general purpose
* renderer for formats of the same
* top-level type, but is not capable of
* rendering the format or any other format
* with the same mime type because the
* sub-type is not supported.
* FORMAT_UNSUPPORTED_TYPE : The Renderer is not capable of rendering
* the format, either because it does not support
* the format's top-level type, or because it's
* a specialized renderer for a different mime type.
* ADAPTIVE_NOT_SEAMLESS : The Renderer can adapt between formats,
* but may suffer a brief discontinuity (~50-100ms)
* when adaptation occurs.
* ADAPTIVE_SEAMLESS : The Renderer can seamlessly adapt between formats.
*/
final String mimeType = format.sampleMimeType;
if (!MimeTypes.isVideo(mimeType)) {
return RendererCapabilities.FORMAT_UNSUPPORTED_TYPE;
}
MediaCodecInfo decoderInfo = null;
try {
MediaCodecSelector mediaCodecSelector = MediaCodecSelector.DEFAULT;
decoderInfo = mediaCodecSelector.getDecoderInfo(mimeType, false);
} catch (MediaCodecUtil.DecoderQueryException e) {
Log.e(LOGTAG, e.getMessage());
}
if (decoderInfo == null) {
return RendererCapabilities.FORMAT_UNSUPPORTED_SUBTYPE;
}
boolean decoderCapable = decoderInfo.isCodecSupported(format.codecs);
if (decoderCapable && format.width > 0 && format.height > 0) {
if (Versions.preLollipop) {
try {
decoderCapable = format.width * format.height <= MediaCodecUtil.maxH264DecodableFrameSize();
} catch (MediaCodecUtil.DecoderQueryException e) {
Log.e(LOGTAG, e.getMessage());
}
if (!decoderCapable) {
if (DEBUG) {
Log.d(LOGTAG, "Check [legacyFrameSize, " +
format.width + "x" + format.height + "]");
}
}
} else {
decoderCapable =
decoderInfo.isVideoSizeAndRateSupportedV21(format.width,
format.height,
format.frameRate);
}
}
int adaptiveSupport = decoderInfo.adaptive ?
RendererCapabilities.ADAPTIVE_SEAMLESS :
RendererCapabilities.ADAPTIVE_NOT_SEAMLESS;
int formatSupport = decoderCapable ?
RendererCapabilities.FORMAT_HANDLED :
RendererCapabilities.FORMAT_EXCEEDS_CAPABILITIES;
return adaptiveSupport | formatSupport;
}
@Override
protected final void createInputBuffer() {
assertTrue(mFormats.size() > 0);
// Calculate maximum size which might be used for target format.
Format currentFormat = mFormats.get(mFormats.size() - 1);
mCodecMaxValues = getCodecMaxValues(currentFormat, mStreamFormats);
// Create a buffer with maximal size for reading source.
// Note : Though we are able to dynamically enlarge buffer size by
// creating DecoderInputBuffer with specific BufferReplacementMode, we
// still allocate a calculated max size buffer for it at first to reduce
// runtime overhead.
mInputBuffer = ByteBuffer.wrap(new byte[mCodecMaxValues.inputSize]);
}
@Override
protected void resetRenderer() {
if (DEBUG) { Log.d(LOGTAG, "[resetRenderer] mInitialized = " + mInitialized); }
if (mInitialized) {
mRendererReconfigured = false;
mRendererReconfigurationState = RECONFIGURATION_STATE.NONE;
mInputBuffer = null;
mCSDInfo = null;
mInitialized = false;
}
}
@Override
protected void handleReconfiguration(DecoderInputBuffer bufferForRead) {
// For adaptive reconfiguration OMX decoders expect all reconfiguration
// data to be supplied at the start of the buffer that also contains
// the first frame in the new format.
assertTrue(mFormats.size() > 0);
if (mRendererReconfigurationState == RECONFIGURATION_STATE.WRITE_PENDING) {
if (DEBUG) { Log.d(LOGTAG, "[feedInput][WRITE_PENDING] put initialization data"); }
Format currentFormat = mFormats.get(mFormats.size() - 1);
for (int i = 0; i < currentFormat.initializationData.size(); i++) {
byte[] data = currentFormat.initializationData.get(i);
bufferForRead.data.put(data);
}
mRendererReconfigurationState = RECONFIGURATION_STATE.QUEUE_PENDING;
}
}
@Override
protected void handleFormatRead(DecoderInputBuffer bufferForRead) {
if (mRendererReconfigurationState == RECONFIGURATION_STATE.QUEUE_PENDING) {
if (DEBUG) { Log.d(LOGTAG, "[feedInput][QUEUE_PENDING] 2 formats in a row."); }
// We received two formats in a row. Clear the current buffer of any reconfiguration data
// associated with the first format.
bufferForRead.clear();
mRendererReconfigurationState = RECONFIGURATION_STATE.WRITE_PENDING;
}
onInputFormatChanged(mFormatHolder.format);
}
@Override
protected void handleEndOfStream(DecoderInputBuffer bufferForRead) {
if (mRendererReconfigurationState == RECONFIGURATION_STATE.QUEUE_PENDING) {
if (DEBUG) { Log.d(LOGTAG, "[feedInput][QUEUE_PENDING] isEndOfStream."); }
// We received a new format immediately before the end of the stream. We need to clear
// the corresponding reconfiguration data from the current buffer, but re-write it into
// a subsequent buffer if there are any (e.g. if the user seeks backwards).
bufferForRead.clear();
mRendererReconfigurationState = RECONFIGURATION_STATE.WRITE_PENDING;
}
mInputStreamEnded = true;
GeckoHlsSample sample = GeckoHlsSample.EOS;
calculateDuration(sample);
}
@Override
protected void handleSamplePreparation(DecoderInputBuffer bufferForRead) {
int csdInfoSize = mCSDInfo != null ? mCSDInfo.length : 0;
int dataSize = bufferForRead.data.limit();
int size = bufferForRead.isKeyFrame() ? csdInfoSize + dataSize : dataSize;
byte[] realData = new byte[size];
if (bufferForRead.isKeyFrame()) {
// Prepend the CSD information to the sample if it's a key frame.
System.arraycopy(mCSDInfo, 0, realData, 0, csdInfoSize);
bufferForRead.data.get(realData, csdInfoSize, dataSize);
} else {
bufferForRead.data.get(realData, 0, dataSize);
}
ByteBuffer buffer = ByteBuffer.wrap(realData);
mInputBuffer = bufferForRead.data;
mInputBuffer.clear();
CryptoInfo cryptoInfo = bufferForRead.isEncrypted() ? bufferForRead.cryptoInfo.getFrameworkCryptoInfoV16() : null;
BufferInfo bufferInfo = new BufferInfo();
// Flags in DecoderInputBuffer are synced with MediaCodec Buffer flags.
int flags = 0;
flags |= bufferForRead.isKeyFrame() ? MediaCodec.BUFFER_FLAG_KEY_FRAME : 0;
flags |= bufferForRead.isEndOfStream() ? MediaCodec.BUFFER_FLAG_END_OF_STREAM : 0;
bufferInfo.set(0, size, bufferForRead.timeUs, flags);
assertTrue(mFormats.size() > 0);
// We add a new format to the list whenever the format changes, so the
// formatIndex should point to the last (latest) format.
GeckoHlsSample sample = GeckoHlsSample.create(buffer,
bufferInfo,
cryptoInfo,
mFormats.size() - 1);
// There's no duration information in ExoPlayer's sample, so we need
// to calculate it.
calculateDuration(sample);
mRendererReconfigurationState = RECONFIGURATION_STATE.NONE;
}
@Override
protected void onPositionReset(long positionUs, boolean joining) {
super.onPositionReset(positionUs, joining);
if (mInitialized && mRendererReconfigured && mFormats.size() != 0) {
if (DEBUG) { Log.d(LOGTAG, "[onPositionReset] WRITE_PENDING"); }
// Any reconfiguration data that we put shortly before the reset
// may be invalid. We avoid this issue by sending reconfiguration
// data following every position reset.
mRendererReconfigurationState = RECONFIGURATION_STATE.WRITE_PENDING;
}
}
@Override
protected boolean clearInputSamplesQueue() {
if (DEBUG) { Log.d(LOGTAG, "clearInputSamplesQueue"); }
mDemuxedInputSamples.clear();
mDemuxedNoDurationSamples.clear();
return true;
}
@Override
protected boolean canReconfigure(Format oldFormat, Format newFormat) {
boolean canReconfig = areAdaptationCompatible(oldFormat, newFormat)
&& newFormat.width <= mCodecMaxValues.width && newFormat.height <= mCodecMaxValues.height
&& newFormat.maxInputSize <= mCodecMaxValues.inputSize;
if (DEBUG) { Log.d(LOGTAG, "[canReconfigure] : " + canReconfig); }
return canReconfig;
}
@Override
protected void prepareReconfiguration() {
if (DEBUG) { Log.d(LOGTAG, "[onInputFormatChanged] starting reconfiguration !"); }
mRendererReconfigured = true;
mRendererReconfigurationState = RECONFIGURATION_STATE.WRITE_PENDING;
}
@Override
protected void updateCSDInfo(Format format) {
int size = 0;
for (int i = 0; i < format.initializationData.size(); i++) {
size += format.initializationData.get(i).length;
}
int startPos = 0;
mCSDInfo = new byte[size];
for (int i = 0; i < format.initializationData.size(); i++) {
byte[] data = format.initializationData.get(i);
System.arraycopy(data, 0, mCSDInfo, startPos, data.length);
startPos += data.length;
}
if (DEBUG) { Log.d(LOGTAG, "mCSDInfo [" + Utils.bytesToHex(mCSDInfo) + "]"); }
}
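// Worked example: if initializationData holds an SPS of 20 bytes followed by a
// PPS of 8 bytes, mCSDInfo becomes a 28-byte array [SPS|PPS], and
// handleSamplePreparation() above prepends those 28 bytes to every key frame.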
@Override
protected void notifyPlayerInputFormatChanged(Format newFormat) {
mPlayerEventDispatcher.onVideoInputFormatChanged(newFormat);
}
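// Assign each of the first |range| samples a duration equal to the smallest
// positive pts gap to any other sample inside a small window around it; see
// the NOTE in calculateDuration() below for the rationale.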
private void calculateSamplesWithin(GeckoHlsSample[] samples, int range) {
// Calculate durations for the first 'range' elements.
for (int i = 0; i < range; i++) {
// Compare against the other samples in the window.
for (int j = -2; j < 14; j++) {
if (i + j >= 0 &&
i + j < range &&
samples[i + j].info.presentationTimeUs > samples[i].info.presentationTimeUs) {
samples[i].duration =
Math.min(samples[i].duration,
samples[i + j].info.presentationTimeUs - samples[i].info.presentationTimeUs);
}
}
}
}
private void calculateDuration(GeckoHlsSample inputSample) {
/*
* NOTE :
* Since we customized this renderer to act as a demuxer, we cannot obtain
* the duration from the DecoderInputBuffer; it carries no duration at all.
* Instead, we calculate it by referring to nearby samples' timestamps.
* A temporary queue |mDemuxedNoDurationSamples| holds the demuxed samples
* from HlsMediaSource, which initially have no duration information.
* We chose 16 as the comparison window size because it is a commonly used
* GOP size.
* Assume there are already 16 demuxed samples in the _no duration_ queue,
* e.g. |-2|-1|0|1|2|3|4|5|6|...|13|
* Once a new demuxed (no duration) sample X (the 17th) is put into the
* temporary queue,
* e.g. |-2|-1|0|1|2|3|4|5|6|...|13|X|
* we are able to calculate the correct duration for sample 0 by finding
* the closest pts that is still greater than sample 0's among these 16
* samples (here, samples -2 to 13).
*/
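/*
* A minimal worked example (hypothetical presentation timestamps in us):
* decode-order pts = | 0 | 66666 | 33333 | ...
* For the sample with pts 0, the window contains pts 66666 and 33333; the
* smallest pts that is still greater than 0 is 33333, so the sample's
* duration becomes 33333 - 0 = 33333 us, even though the samples arrive
* out of presentation order (e.g. because of B-frames).
*/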
if (inputSample != null) {
mDemuxedNoDurationSamples.offer(inputSample);
}
int sizeOfNoDura = mDemuxedNoDurationSamples.size();
// A calculation window size that we have found suitable for both HLS TS & FMP4.
int range = sizeOfNoDura >= 17 ? 17 : sizeOfNoDura;
GeckoHlsSample[] inputArray =
mDemuxedNoDurationSamples.toArray(new GeckoHlsSample[sizeOfNoDura]);
if (range >= 17 && !mInputStreamEnded) {
calculateSamplesWithin(inputArray, range);
GeckoHlsSample toQueue = mDemuxedNoDurationSamples.poll();
mDemuxedInputSamples.offer(toQueue);
if (DEBUG) {
Log.d(LOGTAG, "Demuxed sample PTS : " +
toQueue.info.presentationTimeUs + ", duration :" +
toQueue.duration + ", isKeyFrame(" +
toQueue.isKeyFrame() + ", formatIndex(" +
toQueue.formatIndex + "), queue size : " +
mDemuxedInputSamples.size() + ", NoDuQueue size : " +
mDemuxedNoDurationSamples.size());
}
} else if (mInputStreamEnded) {
calculateSamplesWithin(inputArray, sizeOfNoDura);
// NOTE : We're not able to calculate the duration of the last sample, so
// as a workaround we assign it the previous sample's duration.
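// 33333 us is roughly one frame at 30 fps, used as the initial fallback.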
long prevDuration = 33333;
GeckoHlsSample sample = null;
for (sample = mDemuxedNoDurationSamples.poll(); sample != null; sample = mDemuxedNoDurationSamples.poll()) {
if (sample.duration == Long.MAX_VALUE) {
sample.duration = prevDuration;
if (DEBUG) { Log.d(LOGTAG, "Adjust the PTS of the last sample to " + sample.duration + " (us)"); }
}
prevDuration = sample.duration;
if (DEBUG) {
Log.d(LOGTAG, "last loop to offer samples - PTS : " +
sample.info.presentationTimeUs + ", Duration : " +
sample.duration + ", isEOS : " + sample.isEOS());
}
mDemuxedInputSamples.offer(sample);
}
}
}
// Return the presentation time of the first key frame sample in the queue.
// If there's no key frame in the queue, return Long.MAX_VALUE so that
// MFR won't mistake it for slow decoding.
public long getNextKeyFrameTime() {
long nextKeyFrameTime = Long.MAX_VALUE;
for (GeckoHlsSample sample : mDemuxedInputSamples) {
if (sample != null &&
(sample.info.flags & MediaCodec.BUFFER_FLAG_KEY_FRAME) != 0) {
nextKeyFrameTime = sample.info.presentationTimeUs;
break;
}
}
return nextKeyFrameTime;
}
@Override
protected void onStreamChanged(Format[] formats) {
mStreamFormats = formats;
}
private static CodecMaxValues getCodecMaxValues(Format format, Format[] streamFormats) {
int maxWidth = format.width;
int maxHeight = format.height;
int maxInputSize = getMaxInputSize(format);
for (Format streamFormat : streamFormats) {
if (areAdaptationCompatible(format, streamFormat)) {
maxWidth = Math.max(maxWidth, streamFormat.width);
maxHeight = Math.max(maxHeight, streamFormat.height);
maxInputSize = Math.max(maxInputSize, getMaxInputSize(streamFormat));
}
}
return new CodecMaxValues(maxWidth, maxHeight, maxInputSize);
}
private static int getMaxInputSize(Format format) {
if (format.maxInputSize != Format.NO_VALUE) {
// The format defines an explicit maximum input size.
return format.maxInputSize;
}
if (format.width == Format.NO_VALUE || format.height == Format.NO_VALUE) {
// We can't infer a maximum input size without video dimensions.
return Format.NO_VALUE;
}
// Attempt to infer a maximum input size from the format.
int maxPixels;
int minCompressionRatio;
switch (format.sampleMimeType) {
case MimeTypes.VIDEO_H264:
// Round up width/height to an integer number of macroblocks.
maxPixels = ((format.width + 15) / 16) * ((format.height + 15) / 16) * 16 * 16;
minCompressionRatio = 2;
break;
default:
// Leave the default max input size.
return Format.NO_VALUE;
}
// Estimate the maximum input size assuming three channel 4:2:0 subsampled input frames.
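// e.g. for a hypothetical 1920x1080 H.264 stream: 120 * 68 macroblocks *
// 256 pixels = 2088960 pixels; 2088960 * 3 / (2 * 2) = 1566720 bytes (~1.5 MiB).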
return (maxPixels * 3) / (2 * minCompressionRatio);
}
private static boolean areAdaptationCompatible(Format first, Format second) {
return first.sampleMimeType.equals(second.sampleMimeType) &&
getRotationDegrees(first) == getRotationDegrees(second);
}
private static int getRotationDegrees(Format format) {
return format.rotationDegrees == Format.NO_VALUE ? 0 : format.rotationDegrees;
}
private static final class CodecMaxValues {
public final int width;
public final int height;
public final int inputSize;
public CodecMaxValues(int width, int height, int inputSize) {
this.width = width;
this.height = height;
this.inputSize = inputSize;
}
}
}

View File

@ -0,0 +1,38 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
package org.mozilla.gecko.media;
import java.nio.ByteBuffer;
import org.mozilla.gecko.annotation.WrapForJNI;
// A subset of the class VideoInfo in dom/media/MediaInfo.h
@WrapForJNI
public final class GeckoVideoInfo {
final public byte[] codecSpecificData;
final public byte[] extraData;
final public int displayWidth;
final public int displayHeight;
final public int pictureWidth;
final public int pictureHeight;
final public int rotation;
final public int stereoMode;
final public long duration;
final public String mimeType;
public GeckoVideoInfo(int displayWidth, int displayHeight,
int pictureWidth, int pictureHeight,
int rotation, int stereoMode, long duration, String mimeType,
byte[] extraData, byte[] codecSpecificData) {
this.displayWidth = displayWidth;
this.displayHeight = displayHeight;
this.pictureWidth = pictureWidth;
this.pictureHeight = pictureHeight;
this.rotation = rotation;
this.stereoMode = stereoMode;
this.duration = duration;
this.mimeType = mimeType;
this.extraData = extraData;
this.codecSpecificData = codecSpecificData;
}
}

View File

@ -0,0 +1,41 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
package org.mozilla.gecko.media;
import android.util.Log;
public class Utils {
public static long getThreadId() {
Thread t = Thread.currentThread();
return t.getId();
}
public static String getThreadSignature() {
Thread t = Thread.currentThread();
long l = t.getId();
String name = t.getName();
long p = t.getPriority();
String gname = t.getThreadGroup().getName();
return (name
+ ":(id)" + l
+ ":(priority)" + p
+ ":(group)" + gname);
}
public static void logThreadSignature() {
Log.d("ThreadUtils", getThreadSignature());
}
private final static char[] hexArray = "0123456789ABCDEF".toCharArray();
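// e.g. bytesToHex(new byte[] { 0x0A, (byte) 0xFF }) returns "0AFF".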
public static String bytesToHex(byte[] bytes) {
char[] hexChars = new char[bytes.length * 2];
for ( int j = 0; j < bytes.length; j++ ) {
int v = bytes[j] & 0xFF;
hexChars[j * 2] = hexArray[v >>> 4];
hexChars[j * 2 + 1] = hexArray[v & 0x0F];
}
return new String(hexChars);
}
}

View File

@ -2,7 +2,7 @@ mac-rel-wpt1:
- ./mach clean-nightlies --keep 3 --force
- ./mach build --release
- ./mach test-wpt-failure
- ./mach test-wpt --release --processes 4 --total-chunks 2 --this-chunk 1 --log-raw test-wpt.log --log-errorsummary wpt-errorsummary.log --always-succeed
- ./mach test-wpt --release --processes 4 --total-chunks 4 --this-chunk 1 --log-raw test-wpt.log --log-errorsummary wpt-errorsummary.log --always-succeed
- ./mach filter-intermittents wpt-errorsummary.log --log-intermittents intermittents.log --log-filteredsummary filtered-wpt-errorsummary.log --use-tracker
- ./mach test-wpt --release --binary-arg=--multiprocess --processes 8 --log-raw test-wpt-mp.log --log-errorsummary wpt-mp-errorsummary.log eventsource
- ./mach build-cef --release
@ -12,10 +12,22 @@ mac-rel-wpt1:
mac-rel-wpt2:
- ./mach clean-nightlies --keep 3 --force
- ./mach build --release
- ./mach test-wpt --release --processes 4 --total-chunks 2 --this-chunk 2 --log-raw test-wpt.log --log-errorsummary wpt-errorsummary.log --always-succeed
- ./mach test-wpt --release --processes 4 --total-chunks 4 --this-chunk 2 --log-raw test-wpt.log --log-errorsummary wpt-errorsummary.log --always-succeed
- ./mach filter-intermittents wpt-errorsummary.log --log-intermittents intermittents.log --log-filteredsummary filtered-wpt-errorsummary.log --use-tracker
- ./mach build-geckolib --release
mac-rel-wpt3:
- ./mach clean-nightlies --keep 3 --force
- ./mach build --release
- ./mach test-wpt --release --processes 4 --total-chunks 4 --this-chunk 3 --log-raw test-wpt.log --log-errorsummary wpt-errorsummary.log --always-succeed
- ./mach filter-intermittents wpt-errorsummary.log --log-intermittents intermittents.log --log-filteredsummary filtered-wpt-errorsummary.log --use-tracker
mac-rel-wpt4:
- ./mach clean-nightlies --keep 3 --force
- ./mach build --release
- ./mach test-wpt --release --processes 4 --total-chunks 4 --this-chunk 4 --log-raw test-wpt.log --log-errorsummary wpt-errorsummary.log --always-succeed
- ./mach filter-intermittents wpt-errorsummary.log --log-intermittents intermittents.log --log-filteredsummary filtered-wpt-errorsummary.log --use-tracker
mac-dev-unit:
- ./mach clean-nightlies --keep 3 --force
- env SERVO_RUSTC_LLVM_ASSERTIONS=1 ./mach build --dev
@ -26,14 +38,20 @@ mac-dev-unit:
- bash ./etc/ci/lockfile_changed.sh
- bash ./etc/ci/manifest_changed.sh
mac-rel-css:
mac-rel-css1:
- ./mach clean-nightlies --keep 3 --force
- ./mach build --release
- ./mach test-css --release --processes 4 --log-raw test-css.log --log-errorsummary css-errorsummary.log --always-succeed
- ./mach test-css --release --processes 4 --total-chunks 2 --this-chunk 1 --log-raw test-css.log --log-errorsummary css-errorsummary.log --always-succeed
- ./mach filter-intermittents css-errorsummary.log --log-intermittents intermittents.log --log-filteredsummary filtered-css-errorsummary.log --use-tracker
- bash ./etc/ci/lockfile_changed.sh
- bash ./etc/ci/manifest_changed.sh
mac-rel-css2:
- ./mach clean-nightlies --keep 3 --force
- ./mach build --release
- ./mach test-css --release --processes 4 --total-chunks 2 --this-chunk 2 --log-raw test-css.log --log-errorsummary css-errorsummary.log --always-succeed
- ./mach filter-intermittents css-errorsummary.log --log-intermittents intermittents.log --log-filteredsummary filtered-css-errorsummary.log --use-tracker
mac-nightly:
- ./mach clean-nightlies --keep 3 --force
- ./mach build --release

View File

@ -21,7 +21,7 @@ class TestSecurityNotification(PuppeteerMixin, MarionetteTestCase):
# Secure page
'https://ssl-ev.mozqa.com/',
# Insecure page
'http://www.mozqa.com'
'http://no-ssl.mozqa.com'
]
self.identity_box = self.browser.navbar.locationbar.identity_box

View File

@ -1856,6 +1856,228 @@ constexpr char CodecProxy::NativeCallbacks::OnOutput_t::signature[];
constexpr char CodecProxy::NativeCallbacks::OnOutputFormatChanged_t::name[];
constexpr char CodecProxy::NativeCallbacks::OnOutputFormatChanged_t::signature[];
const char GeckoAudioInfo::name[] =
"org/mozilla/gecko/media/GeckoAudioInfo";
constexpr char GeckoAudioInfo::New_t::name[];
constexpr char GeckoAudioInfo::New_t::signature[];
auto GeckoAudioInfo::New(int32_t a0, int32_t a1, int32_t a2, int32_t a3, int64_t a4, mozilla::jni::String::Param a5, mozilla::jni::ByteArray::Param a6) -> GeckoAudioInfo::LocalRef
{
return mozilla::jni::Constructor<New_t>::Call(GeckoAudioInfo::Context(), nullptr, a0, a1, a2, a3, a4, a5, a6);
}
constexpr char GeckoAudioInfo::BitDepth_t::name[];
constexpr char GeckoAudioInfo::BitDepth_t::signature[];
auto GeckoAudioInfo::BitDepth() const -> int32_t
{
return mozilla::jni::Field<BitDepth_t>::Get(GeckoAudioInfo::mCtx, nullptr);
}
constexpr char GeckoAudioInfo::Channels_t::name[];
constexpr char GeckoAudioInfo::Channels_t::signature[];
auto GeckoAudioInfo::Channels() const -> int32_t
{
return mozilla::jni::Field<Channels_t>::Get(GeckoAudioInfo::mCtx, nullptr);
}
constexpr char GeckoAudioInfo::CodecSpecificData_t::name[];
constexpr char GeckoAudioInfo::CodecSpecificData_t::signature[];
auto GeckoAudioInfo::CodecSpecificData() const -> mozilla::jni::ByteArray::LocalRef
{
return mozilla::jni::Field<CodecSpecificData_t>::Get(GeckoAudioInfo::mCtx, nullptr);
}
constexpr char GeckoAudioInfo::Duration_t::name[];
constexpr char GeckoAudioInfo::Duration_t::signature[];
auto GeckoAudioInfo::Duration() const -> int64_t
{
return mozilla::jni::Field<Duration_t>::Get(GeckoAudioInfo::mCtx, nullptr);
}
constexpr char GeckoAudioInfo::MimeType_t::name[];
constexpr char GeckoAudioInfo::MimeType_t::signature[];
auto GeckoAudioInfo::MimeType() const -> mozilla::jni::String::LocalRef
{
return mozilla::jni::Field<MimeType_t>::Get(GeckoAudioInfo::mCtx, nullptr);
}
constexpr char GeckoAudioInfo::Profile_t::name[];
constexpr char GeckoAudioInfo::Profile_t::signature[];
auto GeckoAudioInfo::Profile() const -> int32_t
{
return mozilla::jni::Field<Profile_t>::Get(GeckoAudioInfo::mCtx, nullptr);
}
constexpr char GeckoAudioInfo::Rate_t::name[];
constexpr char GeckoAudioInfo::Rate_t::signature[];
auto GeckoAudioInfo::Rate() const -> int32_t
{
return mozilla::jni::Field<Rate_t>::Get(GeckoAudioInfo::mCtx, nullptr);
}
const char GeckoHlsSample::name[] =
"org/mozilla/gecko/media/GeckoHlsSample";
constexpr char GeckoHlsSample::IsEOS_t::name[];
constexpr char GeckoHlsSample::IsEOS_t::signature[];
auto GeckoHlsSample::IsEOS() const -> bool
{
return mozilla::jni::Method<IsEOS_t>::Call(GeckoHlsSample::mCtx, nullptr);
}
constexpr char GeckoHlsSample::IsKeyFrame_t::name[];
constexpr char GeckoHlsSample::IsKeyFrame_t::signature[];
auto GeckoHlsSample::IsKeyFrame() const -> bool
{
return mozilla::jni::Method<IsKeyFrame_t>::Call(GeckoHlsSample::mCtx, nullptr);
}
constexpr char GeckoHlsSample::WriteToByteBuffer_t::name[];
constexpr char GeckoHlsSample::WriteToByteBuffer_t::signature[];
auto GeckoHlsSample::WriteToByteBuffer(mozilla::jni::ByteBuffer::Param a0) const -> void
{
return mozilla::jni::Method<WriteToByteBuffer_t>::Call(GeckoHlsSample::mCtx, nullptr, a0);
}
constexpr char GeckoHlsSample::CryptoInfo_t::name[];
constexpr char GeckoHlsSample::CryptoInfo_t::signature[];
auto GeckoHlsSample::CryptoInfo() const -> mozilla::jni::Object::LocalRef
{
return mozilla::jni::Field<CryptoInfo_t>::Get(GeckoHlsSample::mCtx, nullptr);
}
constexpr char GeckoHlsSample::Duration_t::name[];
constexpr char GeckoHlsSample::Duration_t::signature[];
auto GeckoHlsSample::Duration() const -> int64_t
{
return mozilla::jni::Field<Duration_t>::Get(GeckoHlsSample::mCtx, nullptr);
}
auto GeckoHlsSample::Duration(int64_t a0) const -> void
{
return mozilla::jni::Field<Duration_t>::Set(GeckoHlsSample::mCtx, nullptr, a0);
}
constexpr char GeckoHlsSample::FormatIndex_t::name[];
constexpr char GeckoHlsSample::FormatIndex_t::signature[];
auto GeckoHlsSample::FormatIndex() const -> int32_t
{
return mozilla::jni::Field<FormatIndex_t>::Get(GeckoHlsSample::mCtx, nullptr);
}
constexpr char GeckoHlsSample::Info_t::name[];
constexpr char GeckoHlsSample::Info_t::signature[];
auto GeckoHlsSample::Info() const -> mozilla::jni::Object::LocalRef
{
return mozilla::jni::Field<Info_t>::Get(GeckoHlsSample::mCtx, nullptr);
}
const char GeckoVideoInfo::name[] =
"org/mozilla/gecko/media/GeckoVideoInfo";
constexpr char GeckoVideoInfo::New_t::name[];
constexpr char GeckoVideoInfo::New_t::signature[];
auto GeckoVideoInfo::New(int32_t a0, int32_t a1, int32_t a2, int32_t a3, int32_t a4, int32_t a5, int64_t a6, mozilla::jni::String::Param a7, mozilla::jni::ByteArray::Param a8, mozilla::jni::ByteArray::Param a9) -> GeckoVideoInfo::LocalRef
{
return mozilla::jni::Constructor<New_t>::Call(GeckoVideoInfo::Context(), nullptr, a0, a1, a2, a3, a4, a5, a6, a7, a8, a9);
}
constexpr char GeckoVideoInfo::CodecSpecificData_t::name[];
constexpr char GeckoVideoInfo::CodecSpecificData_t::signature[];
auto GeckoVideoInfo::CodecSpecificData() const -> mozilla::jni::ByteArray::LocalRef
{
return mozilla::jni::Field<CodecSpecificData_t>::Get(GeckoVideoInfo::mCtx, nullptr);
}
constexpr char GeckoVideoInfo::DisplayHeight_t::name[];
constexpr char GeckoVideoInfo::DisplayHeight_t::signature[];
auto GeckoVideoInfo::DisplayHeight() const -> int32_t
{
return mozilla::jni::Field<DisplayHeight_t>::Get(GeckoVideoInfo::mCtx, nullptr);
}
constexpr char GeckoVideoInfo::DisplayWidth_t::name[];
constexpr char GeckoVideoInfo::DisplayWidth_t::signature[];
auto GeckoVideoInfo::DisplayWidth() const -> int32_t
{
return mozilla::jni::Field<DisplayWidth_t>::Get(GeckoVideoInfo::mCtx, nullptr);
}
constexpr char GeckoVideoInfo::Duration_t::name[];
constexpr char GeckoVideoInfo::Duration_t::signature[];
auto GeckoVideoInfo::Duration() const -> int64_t
{
return mozilla::jni::Field<Duration_t>::Get(GeckoVideoInfo::mCtx, nullptr);
}
constexpr char GeckoVideoInfo::ExtraData_t::name[];
constexpr char GeckoVideoInfo::ExtraData_t::signature[];
auto GeckoVideoInfo::ExtraData() const -> mozilla::jni::ByteArray::LocalRef
{
return mozilla::jni::Field<ExtraData_t>::Get(GeckoVideoInfo::mCtx, nullptr);
}
constexpr char GeckoVideoInfo::MimeType_t::name[];
constexpr char GeckoVideoInfo::MimeType_t::signature[];
auto GeckoVideoInfo::MimeType() const -> mozilla::jni::String::LocalRef
{
return mozilla::jni::Field<MimeType_t>::Get(GeckoVideoInfo::mCtx, nullptr);
}
constexpr char GeckoVideoInfo::PictureHeight_t::name[];
constexpr char GeckoVideoInfo::PictureHeight_t::signature[];
auto GeckoVideoInfo::PictureHeight() const -> int32_t
{
return mozilla::jni::Field<PictureHeight_t>::Get(GeckoVideoInfo::mCtx, nullptr);
}
constexpr char GeckoVideoInfo::PictureWidth_t::name[];
constexpr char GeckoVideoInfo::PictureWidth_t::signature[];
auto GeckoVideoInfo::PictureWidth() const -> int32_t
{
return mozilla::jni::Field<PictureWidth_t>::Get(GeckoVideoInfo::mCtx, nullptr);
}
constexpr char GeckoVideoInfo::Rotation_t::name[];
constexpr char GeckoVideoInfo::Rotation_t::signature[];
auto GeckoVideoInfo::Rotation() const -> int32_t
{
return mozilla::jni::Field<Rotation_t>::Get(GeckoVideoInfo::mCtx, nullptr);
}
constexpr char GeckoVideoInfo::StereoMode_t::name[];
constexpr char GeckoVideoInfo::StereoMode_t::signature[];
auto GeckoVideoInfo::StereoMode() const -> int32_t
{
return mozilla::jni::Field<StereoMode_t>::Get(GeckoVideoInfo::mCtx, nullptr);
}
const char MediaDrmProxy::name[] =
"org/mozilla/gecko/media/MediaDrmProxy";

View File

@ -5371,6 +5371,556 @@ public:
template<class Impl> class Natives;
};
class GeckoAudioInfo : public mozilla::jni::ObjectBase<GeckoAudioInfo>
{
public:
static const char name[];
explicit GeckoAudioInfo(const Context& ctx) : ObjectBase<GeckoAudioInfo>(ctx) {}
struct New_t {
typedef GeckoAudioInfo Owner;
typedef GeckoAudioInfo::LocalRef ReturnType;
typedef GeckoAudioInfo::Param SetterType;
typedef mozilla::jni::Args<
int32_t,
int32_t,
int32_t,
int32_t,
int64_t,
mozilla::jni::String::Param,
mozilla::jni::ByteArray::Param> Args;
static constexpr char name[] = "<init>";
static constexpr char signature[] =
"(IIIIJLjava/lang/String;[B)V";
static const bool isStatic = false;
static const mozilla::jni::ExceptionMode exceptionMode =
mozilla::jni::ExceptionMode::ABORT;
static const mozilla::jni::CallingThread callingThread =
mozilla::jni::CallingThread::ANY;
static const mozilla::jni::DispatchTarget dispatchTarget =
mozilla::jni::DispatchTarget::CURRENT;
};
static auto New(int32_t, int32_t, int32_t, int32_t, int64_t, mozilla::jni::String::Param, mozilla::jni::ByteArray::Param) -> GeckoAudioInfo::LocalRef;
struct BitDepth_t {
typedef GeckoAudioInfo Owner;
typedef int32_t ReturnType;
typedef int32_t SetterType;
typedef mozilla::jni::Args<> Args;
static constexpr char name[] = "bitDepth";
static constexpr char signature[] =
"I";
static const bool isStatic = false;
static const mozilla::jni::ExceptionMode exceptionMode =
mozilla::jni::ExceptionMode::ABORT;
static const mozilla::jni::CallingThread callingThread =
mozilla::jni::CallingThread::ANY;
static const mozilla::jni::DispatchTarget dispatchTarget =
mozilla::jni::DispatchTarget::CURRENT;
};
auto BitDepth() const -> int32_t;
struct Channels_t {
typedef GeckoAudioInfo Owner;
typedef int32_t ReturnType;
typedef int32_t SetterType;
typedef mozilla::jni::Args<> Args;
static constexpr char name[] = "channels";
static constexpr char signature[] =
"I";
static const bool isStatic = false;
static const mozilla::jni::ExceptionMode exceptionMode =
mozilla::jni::ExceptionMode::ABORT;
static const mozilla::jni::CallingThread callingThread =
mozilla::jni::CallingThread::ANY;
static const mozilla::jni::DispatchTarget dispatchTarget =
mozilla::jni::DispatchTarget::CURRENT;
};
auto Channels() const -> int32_t;
struct CodecSpecificData_t {
typedef GeckoAudioInfo Owner;
typedef mozilla::jni::ByteArray::LocalRef ReturnType;
typedef mozilla::jni::ByteArray::Param SetterType;
typedef mozilla::jni::Args<> Args;
static constexpr char name[] = "codecSpecificData";
static constexpr char signature[] =
"[B";
static const bool isStatic = false;
static const mozilla::jni::ExceptionMode exceptionMode =
mozilla::jni::ExceptionMode::ABORT;
static const mozilla::jni::CallingThread callingThread =
mozilla::jni::CallingThread::ANY;
static const mozilla::jni::DispatchTarget dispatchTarget =
mozilla::jni::DispatchTarget::CURRENT;
};
auto CodecSpecificData() const -> mozilla::jni::ByteArray::LocalRef;
struct Duration_t {
typedef GeckoAudioInfo Owner;
typedef int64_t ReturnType;
typedef int64_t SetterType;
typedef mozilla::jni::Args<> Args;
static constexpr char name[] = "duration";
static constexpr char signature[] =
"J";
static const bool isStatic = false;
static const mozilla::jni::ExceptionMode exceptionMode =
mozilla::jni::ExceptionMode::ABORT;
static const mozilla::jni::CallingThread callingThread =
mozilla::jni::CallingThread::ANY;
static const mozilla::jni::DispatchTarget dispatchTarget =
mozilla::jni::DispatchTarget::CURRENT;
};
auto Duration() const -> int64_t;
struct MimeType_t {
typedef GeckoAudioInfo Owner;
typedef mozilla::jni::String::LocalRef ReturnType;
typedef mozilla::jni::String::Param SetterType;
typedef mozilla::jni::Args<> Args;
static constexpr char name[] = "mimeType";
static constexpr char signature[] =
"Ljava/lang/String;";
static const bool isStatic = false;
static const mozilla::jni::ExceptionMode exceptionMode =
mozilla::jni::ExceptionMode::ABORT;
static const mozilla::jni::CallingThread callingThread =
mozilla::jni::CallingThread::ANY;
static const mozilla::jni::DispatchTarget dispatchTarget =
mozilla::jni::DispatchTarget::CURRENT;
};
auto MimeType() const -> mozilla::jni::String::LocalRef;
struct Profile_t {
typedef GeckoAudioInfo Owner;
typedef int32_t ReturnType;
typedef int32_t SetterType;
typedef mozilla::jni::Args<> Args;
static constexpr char name[] = "profile";
static constexpr char signature[] =
"I";
static const bool isStatic = false;
static const mozilla::jni::ExceptionMode exceptionMode =
mozilla::jni::ExceptionMode::ABORT;
static const mozilla::jni::CallingThread callingThread =
mozilla::jni::CallingThread::ANY;
static const mozilla::jni::DispatchTarget dispatchTarget =
mozilla::jni::DispatchTarget::CURRENT;
};
auto Profile() const -> int32_t;
struct Rate_t {
typedef GeckoAudioInfo Owner;
typedef int32_t ReturnType;
typedef int32_t SetterType;
typedef mozilla::jni::Args<> Args;
static constexpr char name[] = "rate";
static constexpr char signature[] =
"I";
static const bool isStatic = false;
static const mozilla::jni::ExceptionMode exceptionMode =
mozilla::jni::ExceptionMode::ABORT;
static const mozilla::jni::CallingThread callingThread =
mozilla::jni::CallingThread::ANY;
static const mozilla::jni::DispatchTarget dispatchTarget =
mozilla::jni::DispatchTarget::CURRENT;
};
auto Rate() const -> int32_t;
static const mozilla::jni::CallingThread callingThread =
mozilla::jni::CallingThread::ANY;
};
class GeckoHlsSample : public mozilla::jni::ObjectBase<GeckoHlsSample>
{
public:
static const char name[];
explicit GeckoHlsSample(const Context& ctx) : ObjectBase<GeckoHlsSample>(ctx) {}
struct IsEOS_t {
typedef GeckoHlsSample Owner;
typedef bool ReturnType;
typedef bool SetterType;
typedef mozilla::jni::Args<> Args;
static constexpr char name[] = "isEOS";
static constexpr char signature[] =
"()Z";
static const bool isStatic = false;
static const mozilla::jni::ExceptionMode exceptionMode =
mozilla::jni::ExceptionMode::ABORT;
static const mozilla::jni::CallingThread callingThread =
mozilla::jni::CallingThread::ANY;
static const mozilla::jni::DispatchTarget dispatchTarget =
mozilla::jni::DispatchTarget::CURRENT;
};
auto IsEOS() const -> bool;
struct IsKeyFrame_t {
typedef GeckoHlsSample Owner;
typedef bool ReturnType;
typedef bool SetterType;
typedef mozilla::jni::Args<> Args;
static constexpr char name[] = "isKeyFrame";
static constexpr char signature[] =
"()Z";
static const bool isStatic = false;
static const mozilla::jni::ExceptionMode exceptionMode =
mozilla::jni::ExceptionMode::ABORT;
static const mozilla::jni::CallingThread callingThread =
mozilla::jni::CallingThread::ANY;
static const mozilla::jni::DispatchTarget dispatchTarget =
mozilla::jni::DispatchTarget::CURRENT;
};
auto IsKeyFrame() const -> bool;
struct WriteToByteBuffer_t {
typedef GeckoHlsSample Owner;
typedef void ReturnType;
typedef void SetterType;
typedef mozilla::jni::Args<
mozilla::jni::ByteBuffer::Param> Args;
static constexpr char name[] = "writeToByteBuffer";
static constexpr char signature[] =
"(Ljava/nio/ByteBuffer;)V";
static const bool isStatic = false;
static const mozilla::jni::ExceptionMode exceptionMode =
mozilla::jni::ExceptionMode::ABORT;
static const mozilla::jni::CallingThread callingThread =
mozilla::jni::CallingThread::ANY;
static const mozilla::jni::DispatchTarget dispatchTarget =
mozilla::jni::DispatchTarget::CURRENT;
};
auto WriteToByteBuffer(mozilla::jni::ByteBuffer::Param) const -> void;
struct CryptoInfo_t {
typedef GeckoHlsSample Owner;
typedef mozilla::jni::Object::LocalRef ReturnType;
typedef mozilla::jni::Object::Param SetterType;
typedef mozilla::jni::Args<> Args;
static constexpr char name[] = "cryptoInfo";
static constexpr char signature[] =
"Landroid/media/MediaCodec$CryptoInfo;";
static const bool isStatic = false;
static const mozilla::jni::ExceptionMode exceptionMode =
mozilla::jni::ExceptionMode::ABORT;
static const mozilla::jni::CallingThread callingThread =
mozilla::jni::CallingThread::ANY;
static const mozilla::jni::DispatchTarget dispatchTarget =
mozilla::jni::DispatchTarget::CURRENT;
};
auto CryptoInfo() const -> mozilla::jni::Object::LocalRef;
struct Duration_t {
typedef GeckoHlsSample Owner;
typedef int64_t ReturnType;
typedef int64_t SetterType;
typedef mozilla::jni::Args<> Args;
static constexpr char name[] = "duration";
static constexpr char signature[] =
"J";
static const bool isStatic = false;
static const mozilla::jni::ExceptionMode exceptionMode =
mozilla::jni::ExceptionMode::ABORT;
static const mozilla::jni::CallingThread callingThread =
mozilla::jni::CallingThread::ANY;
static const mozilla::jni::DispatchTarget dispatchTarget =
mozilla::jni::DispatchTarget::CURRENT;
};
auto Duration() const -> int64_t;
auto Duration(int64_t) const -> void;
struct FormatIndex_t {
typedef GeckoHlsSample Owner;
typedef int32_t ReturnType;
typedef int32_t SetterType;
typedef mozilla::jni::Args<> Args;
static constexpr char name[] = "formatIndex";
static constexpr char signature[] =
"I";
static const bool isStatic = false;
static const mozilla::jni::ExceptionMode exceptionMode =
mozilla::jni::ExceptionMode::ABORT;
static const mozilla::jni::CallingThread callingThread =
mozilla::jni::CallingThread::ANY;
static const mozilla::jni::DispatchTarget dispatchTarget =
mozilla::jni::DispatchTarget::CURRENT;
};
auto FormatIndex() const -> int32_t;
struct Info_t {
typedef GeckoHlsSample Owner;
typedef mozilla::jni::Object::LocalRef ReturnType;
typedef mozilla::jni::Object::Param SetterType;
typedef mozilla::jni::Args<> Args;
static constexpr char name[] = "info";
static constexpr char signature[] =
"Landroid/media/MediaCodec$BufferInfo;";
static const bool isStatic = false;
static const mozilla::jni::ExceptionMode exceptionMode =
mozilla::jni::ExceptionMode::ABORT;
static const mozilla::jni::CallingThread callingThread =
mozilla::jni::CallingThread::ANY;
static const mozilla::jni::DispatchTarget dispatchTarget =
mozilla::jni::DispatchTarget::CURRENT;
};
auto Info() const -> mozilla::jni::Object::LocalRef;
static const mozilla::jni::CallingThread callingThread =
mozilla::jni::CallingThread::ANY;
};
class GeckoVideoInfo : public mozilla::jni::ObjectBase<GeckoVideoInfo>
{
public:
static const char name[];
explicit GeckoVideoInfo(const Context& ctx) : ObjectBase<GeckoVideoInfo>(ctx) {}
struct New_t {
typedef GeckoVideoInfo Owner;
typedef GeckoVideoInfo::LocalRef ReturnType;
typedef GeckoVideoInfo::Param SetterType;
typedef mozilla::jni::Args<
int32_t,
int32_t,
int32_t,
int32_t,
int32_t,
int32_t,
int64_t,
mozilla::jni::String::Param,
mozilla::jni::ByteArray::Param,
mozilla::jni::ByteArray::Param> Args;
static constexpr char name[] = "<init>";
static constexpr char signature[] =
"(IIIIIIJLjava/lang/String;[B[B)V";
static const bool isStatic = false;
static const mozilla::jni::ExceptionMode exceptionMode =
mozilla::jni::ExceptionMode::ABORT;
static const mozilla::jni::CallingThread callingThread =
mozilla::jni::CallingThread::ANY;
static const mozilla::jni::DispatchTarget dispatchTarget =
mozilla::jni::DispatchTarget::CURRENT;
};
static auto New(int32_t, int32_t, int32_t, int32_t, int32_t, int32_t, int64_t, mozilla::jni::String::Param, mozilla::jni::ByteArray::Param, mozilla::jni::ByteArray::Param) -> GeckoVideoInfo::LocalRef;
struct CodecSpecificData_t {
typedef GeckoVideoInfo Owner;
typedef mozilla::jni::ByteArray::LocalRef ReturnType;
typedef mozilla::jni::ByteArray::Param SetterType;
typedef mozilla::jni::Args<> Args;
static constexpr char name[] = "codecSpecificData";
static constexpr char signature[] =
"[B";
static const bool isStatic = false;
static const mozilla::jni::ExceptionMode exceptionMode =
mozilla::jni::ExceptionMode::ABORT;
static const mozilla::jni::CallingThread callingThread =
mozilla::jni::CallingThread::ANY;
static const mozilla::jni::DispatchTarget dispatchTarget =
mozilla::jni::DispatchTarget::CURRENT;
};
auto CodecSpecificData() const -> mozilla::jni::ByteArray::LocalRef;
struct DisplayHeight_t {
typedef GeckoVideoInfo Owner;
typedef int32_t ReturnType;
typedef int32_t SetterType;
typedef mozilla::jni::Args<> Args;
static constexpr char name[] = "displayHeight";
static constexpr char signature[] =
"I";
static const bool isStatic = false;
static const mozilla::jni::ExceptionMode exceptionMode =
mozilla::jni::ExceptionMode::ABORT;
static const mozilla::jni::CallingThread callingThread =
mozilla::jni::CallingThread::ANY;
static const mozilla::jni::DispatchTarget dispatchTarget =
mozilla::jni::DispatchTarget::CURRENT;
};
auto DisplayHeight() const -> int32_t;
struct DisplayWidth_t {
typedef GeckoVideoInfo Owner;
typedef int32_t ReturnType;
typedef int32_t SetterType;
typedef mozilla::jni::Args<> Args;
static constexpr char name[] = "displayWidth";
static constexpr char signature[] =
"I";
static const bool isStatic = false;
static const mozilla::jni::ExceptionMode exceptionMode =
mozilla::jni::ExceptionMode::ABORT;
static const mozilla::jni::CallingThread callingThread =
mozilla::jni::CallingThread::ANY;
static const mozilla::jni::DispatchTarget dispatchTarget =
mozilla::jni::DispatchTarget::CURRENT;
};
auto DisplayWidth() const -> int32_t;
struct Duration_t {
typedef GeckoVideoInfo Owner;
typedef int64_t ReturnType;
typedef int64_t SetterType;
typedef mozilla::jni::Args<> Args;
static constexpr char name[] = "duration";
static constexpr char signature[] =
"J";
static const bool isStatic = false;
static const mozilla::jni::ExceptionMode exceptionMode =
mozilla::jni::ExceptionMode::ABORT;
static const mozilla::jni::CallingThread callingThread =
mozilla::jni::CallingThread::ANY;
static const mozilla::jni::DispatchTarget dispatchTarget =
mozilla::jni::DispatchTarget::CURRENT;
};
auto Duration() const -> int64_t;
struct ExtraData_t {
typedef GeckoVideoInfo Owner;
typedef mozilla::jni::ByteArray::LocalRef ReturnType;
typedef mozilla::jni::ByteArray::Param SetterType;
typedef mozilla::jni::Args<> Args;
static constexpr char name[] = "extraData";
static constexpr char signature[] =
"[B";
static const bool isStatic = false;
static const mozilla::jni::ExceptionMode exceptionMode =
mozilla::jni::ExceptionMode::ABORT;
static const mozilla::jni::CallingThread callingThread =
mozilla::jni::CallingThread::ANY;
static const mozilla::jni::DispatchTarget dispatchTarget =
mozilla::jni::DispatchTarget::CURRENT;
};
auto ExtraData() const -> mozilla::jni::ByteArray::LocalRef;
struct MimeType_t {
typedef GeckoVideoInfo Owner;
typedef mozilla::jni::String::LocalRef ReturnType;
typedef mozilla::jni::String::Param SetterType;
typedef mozilla::jni::Args<> Args;
static constexpr char name[] = "mimeType";
static constexpr char signature[] =
"Ljava/lang/String;";
static const bool isStatic = false;
static const mozilla::jni::ExceptionMode exceptionMode =
mozilla::jni::ExceptionMode::ABORT;
static const mozilla::jni::CallingThread callingThread =
mozilla::jni::CallingThread::ANY;
static const mozilla::jni::DispatchTarget dispatchTarget =
mozilla::jni::DispatchTarget::CURRENT;
};
auto MimeType() const -> mozilla::jni::String::LocalRef;
struct PictureHeight_t {
typedef GeckoVideoInfo Owner;
typedef int32_t ReturnType;
typedef int32_t SetterType;
typedef mozilla::jni::Args<> Args;
static constexpr char name[] = "pictureHeight";
static constexpr char signature[] =
"I";
static const bool isStatic = false;
static const mozilla::jni::ExceptionMode exceptionMode =
mozilla::jni::ExceptionMode::ABORT;
static const mozilla::jni::CallingThread callingThread =
mozilla::jni::CallingThread::ANY;
static const mozilla::jni::DispatchTarget dispatchTarget =
mozilla::jni::DispatchTarget::CURRENT;
};
auto PictureHeight() const -> int32_t;
struct PictureWidth_t {
typedef GeckoVideoInfo Owner;
typedef int32_t ReturnType;
typedef int32_t SetterType;
typedef mozilla::jni::Args<> Args;
static constexpr char name[] = "pictureWidth";
static constexpr char signature[] =
"I";
static const bool isStatic = false;
static const mozilla::jni::ExceptionMode exceptionMode =
mozilla::jni::ExceptionMode::ABORT;
static const mozilla::jni::CallingThread callingThread =
mozilla::jni::CallingThread::ANY;
static const mozilla::jni::DispatchTarget dispatchTarget =
mozilla::jni::DispatchTarget::CURRENT;
};
auto PictureWidth() const -> int32_t;
struct Rotation_t {
typedef GeckoVideoInfo Owner;
typedef int32_t ReturnType;
typedef int32_t SetterType;
typedef mozilla::jni::Args<> Args;
static constexpr char name[] = "rotation";
static constexpr char signature[] =
"I";
static const bool isStatic = false;
static const mozilla::jni::ExceptionMode exceptionMode =
mozilla::jni::ExceptionMode::ABORT;
static const mozilla::jni::CallingThread callingThread =
mozilla::jni::CallingThread::ANY;
static const mozilla::jni::DispatchTarget dispatchTarget =
mozilla::jni::DispatchTarget::CURRENT;
};
auto Rotation() const -> int32_t;
struct StereoMode_t {
typedef GeckoVideoInfo Owner;
typedef int32_t ReturnType;
typedef int32_t SetterType;
typedef mozilla::jni::Args<> Args;
static constexpr char name[] = "stereoMode";
static constexpr char signature[] =
"I";
static const bool isStatic = false;
static const mozilla::jni::ExceptionMode exceptionMode =
mozilla::jni::ExceptionMode::ABORT;
static const mozilla::jni::CallingThread callingThread =
mozilla::jni::CallingThread::ANY;
static const mozilla::jni::DispatchTarget dispatchTarget =
mozilla::jni::DispatchTarget::CURRENT;
};
auto StereoMode() const -> int32_t;
static const mozilla::jni::CallingThread callingThread =
mozilla::jni::CallingThread::ANY;
};
class MediaDrmProxy : public mozilla::jni::ObjectBase<MediaDrmProxy>
{
public: