Merge mozilla-central to autoland. a=merge on a CLOSED TREE

This commit is contained in:
Daniel Varga 2019-08-04 01:17:02 +03:00
commit a1545c9a73
63 changed files with 1735 additions and 2384 deletions

View File

@ -5,7 +5,6 @@
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
#include "Blob.h"
#include "EmptyBlobImpl.h"
#include "File.h"
#include "MemoryBlobImpl.h"
#include "mozilla/dom/BlobBinding.h"
@ -73,14 +72,6 @@ Blob* Blob::Create(nsISupports* aParent, BlobImpl* aImpl) {
return aImpl->IsFile() ? new File(aParent, aImpl) : new Blob(aParent, aImpl);
}
/* static */
already_AddRefed<Blob> Blob::CreateEmptyBlob(nsISupports* aParent,
const nsAString& aContentType) {
RefPtr<Blob> blob = Blob::Create(aParent, new EmptyBlobImpl(aContentType));
MOZ_ASSERT(!blob->mImpl->IsFile());
return blob.forget();
}
/* static */
already_AddRefed<Blob> Blob::CreateStringBlob(nsISupports* aParent,
const nsACString& aData,

View File

@ -50,9 +50,6 @@ class Blob : public nsIMutable,
// This creates a Blob or a File based on the type of BlobImpl.
static Blob* Create(nsISupports* aParent, BlobImpl* aImpl);
static already_AddRefed<Blob> CreateEmptyBlob(nsISupports* aParent,
const nsAString& aContentType);
static already_AddRefed<Blob> CreateStringBlob(nsISupports* aParent,
const nsACString& aData,
const nsAString& aContentType);

View File

@ -19,9 +19,11 @@
#include "IDBTransaction.h"
#include "IndexedDatabase.h"
#include "IndexedDatabaseInlines.h"
#include <mozIIPCBlobInputStream.h>
#include "mozilla/BasicEvents.h"
#include "mozilla/CycleCollectedJSRuntime.h"
#include "mozilla/Maybe.h"
#include "mozilla/SnappyUncompressInputStream.h"
#include "mozilla/TypeTraits.h"
#include "mozilla/dom/Element.h"
#include "mozilla/dom/Event.h"
@ -73,6 +75,16 @@ namespace dom {
namespace indexedDB {
namespace {
/*******************************************************************************
* Constants
******************************************************************************/
const uint32_t kFileCopyBufferSize = 32768;
} // namespace
/*******************************************************************************
* ThreadLocal
******************************************************************************/
@ -496,14 +508,11 @@ class PermissionRequestMainProcessHelper final : public PermissionRequestBase {
void DeserializeStructuredCloneFiles(
IDBDatabase* aDatabase,
const nsTArray<SerializedStructuredCloneFile>& aSerializedFiles,
const nsTArray<RefPtr<JS::WasmModule>>* aModuleSet,
nsTArray<StructuredCloneFile>& aFiles) {
MOZ_ASSERT_IF(aModuleSet, !aModuleSet->IsEmpty());
bool aForPreprocess, nsTArray<StructuredCloneFile>& aFiles) {
MOZ_ASSERT(aFiles.IsEmpty());
MOZ_ASSERT_IF(aForPreprocess, aSerializedFiles.Length() == 1);
if (!aSerializedFiles.IsEmpty()) {
uint32_t moduleIndex = 0;
const uint32_t count = aSerializedFiles.Length();
aFiles.SetCapacity(count);
@ -511,6 +520,9 @@ void DeserializeStructuredCloneFiles(
const SerializedStructuredCloneFile& serializedFile =
aSerializedFiles[index];
MOZ_ASSERT_IF(aForPreprocess, serializedFile.type() ==
StructuredCloneFile::eStructuredClone);
const BlobOrMutableFile& blobOrMutableFile = serializedFile.file();
switch (serializedFile.type()) {
@ -579,55 +591,47 @@ void DeserializeStructuredCloneFiles(
}
case StructuredCloneFile::eStructuredClone: {
StructuredCloneFile* file = aFiles.AppendElement();
MOZ_ASSERT(file);
if (aForPreprocess) {
MOZ_ASSERT(blobOrMutableFile.type() == BlobOrMutableFile::TIPCBlob);
file->mType = StructuredCloneFile::eStructuredClone;
const IPCBlob& ipcBlob = blobOrMutableFile.get_IPCBlob();
break;
}
RefPtr<BlobImpl> blobImpl = IPCBlobUtils::Deserialize(ipcBlob);
MOZ_ASSERT(blobImpl);
case StructuredCloneFile::eWasmBytecode: {
if (aModuleSet) {
RefPtr<Blob> blob =
Blob::Create(aDatabase->GetOwnerGlobal(), blobImpl);
StructuredCloneFile* file = aFiles.AppendElement();
MOZ_ASSERT(file);
file->mType = StructuredCloneFile::eStructuredClone;
file->mBlob.swap(blob);
} else {
MOZ_ASSERT(blobOrMutableFile.type() == BlobOrMutableFile::Tnull_t);
StructuredCloneFile* file = aFiles.AppendElement();
MOZ_ASSERT(file);
file->mType = StructuredCloneFile::eWasmBytecode;
MOZ_ASSERT(moduleIndex < aModuleSet->Length());
file->mWasmModule = aModuleSet->ElementAt(moduleIndex);
moduleIndex++;
break;
file->mType = StructuredCloneFile::eStructuredClone;
}
MOZ_ASSERT(blobOrMutableFile.type() == BlobOrMutableFile::TIPCBlob);
const IPCBlob& ipcBlob = blobOrMutableFile.get_IPCBlob();
RefPtr<BlobImpl> blobImpl = IPCBlobUtils::Deserialize(ipcBlob);
MOZ_ASSERT(blobImpl);
RefPtr<Blob> blob =
Blob::Create(aDatabase->GetOwnerGlobal(), blobImpl);
StructuredCloneFile* file = aFiles.AppendElement();
MOZ_ASSERT(file);
file->mType = StructuredCloneFile::eWasmBytecode;
file->mBlob.swap(blob);
break;
}
case StructuredCloneFile::eWasmBytecode:
case StructuredCloneFile::eWasmCompiled: {
MOZ_ASSERT(blobOrMutableFile.type() == BlobOrMutableFile::Tnull_t);
StructuredCloneFile* file = aFiles.AppendElement();
MOZ_ASSERT(file);
file->mType = StructuredCloneFile::eWasmCompiled;
file->mType = serializedFile.type();
// Don't set mBlob, support for storing WebAssembly.Modules has been
// removed in bug 1469395. Support for de-serialization of
// WebAssembly.Modules has been removed in bug 1561876. Full removal
// is tracked in bug 1487479.
break;
}
@ -1284,29 +1288,42 @@ class BackgroundRequestChild::PreprocessHelper final
: public CancelableRunnable,
public nsIInputStreamCallback,
public nsIFileMetadataCallback {
enum class State {
// Just created on the owning thread, dispatched to the thread pool. Next
// step is either Finishing if stream was ready to be read or
// WaitingForStreamReady if the stream is not ready.
Initial,
// Waiting for stream to be ready on a thread pool thread. Next state is
// Finishing.
WaitingForStreamReady,
// Waiting to finish/finishing on the owning thread. Next step is Completed.
Finishing,
// All done.
Completed
};
nsCOMPtr<nsIEventTarget> mOwningEventTarget;
nsTArray<nsCOMPtr<nsIInputStream>> mStreams;
nsTArray<RefPtr<JS::WasmModule>> mModuleSet;
BackgroundRequestChild* mActor;
// This is populated when the processing of the stream runs.
PRFileDesc* mCurrentBytecodeFileDesc;
RefPtr<TaskQueue> mTaskQueue;
nsCOMPtr<nsIEventTarget> mTaskQueueEventTarget;
uint32_t mModuleSetIndex;
nsCOMPtr<nsIInputStream> mStream;
UniquePtr<JSStructuredCloneData> mCloneData;
BackgroundRequestChild* mActor;
uint32_t mCloneDataIndex;
nsresult mResultCode;
State mState;
public:
PreprocessHelper(uint32_t aModuleSetIndex, BackgroundRequestChild* aActor)
PreprocessHelper(uint32_t aCloneDataIndex, BackgroundRequestChild* aActor)
: CancelableRunnable(
"indexedDB::BackgroundRequestChild::PreprocessHelper"),
mOwningEventTarget(aActor->GetActorEventTarget()),
mActor(aActor),
mCurrentBytecodeFileDesc(nullptr),
mModuleSetIndex(aModuleSetIndex),
mResultCode(NS_OK) {
mCloneDataIndex(aCloneDataIndex),
mResultCode(NS_OK),
mState(State::Initial) {
AssertIsOnOwningThread();
MOZ_ASSERT(aActor);
aActor->AssertIsOnOwningThread();
@ -1328,33 +1345,29 @@ class BackgroundRequestChild::PreprocessHelper final
mActor = nullptr;
}
nsresult Init(const nsTArray<StructuredCloneFile>& aFiles);
nsresult Init(const StructuredCloneFile& aFile);
nsresult Dispatch();
private:
~PreprocessHelper() {
MOZ_ASSERT(mState == State::Initial || mState == State::Completed);
if (mTaskQueue) {
mTaskQueue->BeginShutdown();
}
}
void RunOnOwningThread();
nsresult Start();
void ProcessCurrentStream();
nsresult ProcessStream();
nsresult WaitForStreamReady(nsIInputStream* aInputStream);
void ContinueWithStatus(nsresult aStatus);
nsresult DataIsReady(nsIInputStream* aInputStream);
void Finish();
NS_DECL_ISUPPORTS_INHERITED
NS_DECL_NSIRUNNABLE
NS_DECL_NSIINPUTSTREAMCALLBACK
NS_DECL_NSIFILEMETADATACALLBACK
virtual nsresult Cancel() override;
};
/*******************************************************************************
@ -2537,7 +2550,7 @@ BackgroundRequestChild::BackgroundRequestChild(IDBRequest* aRequest)
: BackgroundRequestChildBase(aRequest),
mTransaction(aRequest->GetTransaction()),
mRunningPreprocessHelpers(0),
mCurrentModuleSetIndex(0),
mCurrentCloneDataIndex(0),
mPreprocessResultCode(NS_OK),
mGetAll(false) {
MOZ_ASSERT(mTransaction);
@ -2575,27 +2588,27 @@ void BackgroundRequestChild::MaybeSendContinue() {
}
void BackgroundRequestChild::OnPreprocessFinished(
uint32_t aModuleSetIndex, nsTArray<RefPtr<JS::WasmModule>>& aModuleSet) {
uint32_t aCloneDataIndex, UniquePtr<JSStructuredCloneData> aCloneData) {
AssertIsOnOwningThread();
MOZ_ASSERT(aModuleSetIndex < mPreprocessHelpers.Length());
MOZ_ASSERT(!aModuleSet.IsEmpty());
MOZ_ASSERT(mPreprocessHelpers[aModuleSetIndex]);
MOZ_ASSERT(mModuleSets[aModuleSetIndex].IsEmpty());
MOZ_ASSERT(aCloneDataIndex < mPreprocessHelpers.Length());
MOZ_ASSERT(aCloneData);
MOZ_ASSERT(mPreprocessHelpers[aCloneDataIndex]);
MOZ_ASSERT(!mCloneDatas[aCloneDataIndex]);
mModuleSets[aModuleSetIndex].SwapElements(aModuleSet);
mCloneDatas[aCloneDataIndex] = std::move(aCloneData);
MaybeSendContinue();
mPreprocessHelpers[aModuleSetIndex] = nullptr;
mPreprocessHelpers[aCloneDataIndex] = nullptr;
}
void BackgroundRequestChild::OnPreprocessFailed(uint32_t aModuleSetIndex,
void BackgroundRequestChild::OnPreprocessFailed(uint32_t aCloneDataIndex,
nsresult aErrorCode) {
AssertIsOnOwningThread();
MOZ_ASSERT(aModuleSetIndex < mPreprocessHelpers.Length());
MOZ_ASSERT(aCloneDataIndex < mPreprocessHelpers.Length());
MOZ_ASSERT(NS_FAILED(aErrorCode));
MOZ_ASSERT(mPreprocessHelpers[aModuleSetIndex]);
MOZ_ASSERT(mModuleSets[aModuleSetIndex].IsEmpty());
MOZ_ASSERT(mPreprocessHelpers[aCloneDataIndex]);
MOZ_ASSERT(!mCloneDatas[aCloneDataIndex]);
if (NS_SUCCEEDED(mPreprocessResultCode)) {
mPreprocessResultCode = aErrorCode;
@ -2603,18 +2616,18 @@ void BackgroundRequestChild::OnPreprocessFailed(uint32_t aModuleSetIndex,
MaybeSendContinue();
mPreprocessHelpers[aModuleSetIndex] = nullptr;
mPreprocessHelpers[aCloneDataIndex] = nullptr;
}
const nsTArray<RefPtr<JS::WasmModule>>*
BackgroundRequestChild::GetNextModuleSet(const StructuredCloneReadInfo& aInfo) {
if (!aInfo.mHasPreprocessInfo) {
return nullptr;
}
UniquePtr<JSStructuredCloneData> BackgroundRequestChild::GetNextCloneData() {
AssertIsOnOwningThread();
MOZ_ASSERT(mCurrentCloneDataIndex < mCloneDatas.Length());
MOZ_ASSERT(mCloneDatas[mCurrentCloneDataIndex]);
MOZ_ASSERT(mCurrentModuleSetIndex < mModuleSets.Length());
MOZ_ASSERT(!mModuleSets[mCurrentModuleSetIndex].IsEmpty());
return &mModuleSets[mCurrentModuleSetIndex++];
UniquePtr<JSStructuredCloneData> cloneData;
mCloneDatas[mCurrentCloneDataIndex++].swap(cloneData);
return cloneData;
}
void BackgroundRequestChild::HandleResponse(nsresult aResponse) {
@ -2653,9 +2666,14 @@ void BackgroundRequestChild::HandleResponse(
StructuredCloneReadInfo cloneReadInfo(std::move(serializedCloneInfo));
DeserializeStructuredCloneFiles(mTransaction->Database(), aResponse.files(),
GetNextModuleSet(cloneReadInfo),
/* aForPreprocess */ false,
cloneReadInfo.mFiles);
if (cloneReadInfo.mHasPreprocessInfo) {
UniquePtr<JSStructuredCloneData> cloneData = GetNextCloneData();
cloneReadInfo.mData = std::move(*cloneData);
}
ResultHelper helper(mRequest, mTransaction, &cloneReadInfo);
DispatchSuccessEvent(&helper);
@ -2687,9 +2705,14 @@ void BackgroundRequestChild::HandleResponse(
// Get the files
nsTArray<StructuredCloneFile> files;
DeserializeStructuredCloneFiles(database, serializedCloneInfo.files(),
GetNextModuleSet(*cloneReadInfo), files);
/* aForPreprocess */ false, files);
cloneReadInfo->mFiles = std::move(files);
if (cloneReadInfo->mHasPreprocessInfo) {
UniquePtr<JSStructuredCloneData> cloneData = GetNextCloneData();
cloneReadInfo->mData = std::move(*cloneData);
}
}
}
@ -2717,7 +2740,7 @@ void BackgroundRequestChild::HandleResponse(uint64_t aResponse) {
}
nsresult BackgroundRequestChild::HandlePreprocess(
const WasmModulePreprocessInfo& aPreprocessInfo) {
const PreprocessInfo& aPreprocessInfo) {
AssertIsOnOwningThread();
IDBDatabase* database = mTransaction->Database();
@ -2725,13 +2748,15 @@ nsresult BackgroundRequestChild::HandlePreprocess(
mPreprocessHelpers.SetLength(1);
nsTArray<StructuredCloneFile> files;
DeserializeStructuredCloneFiles(database, aPreprocessInfo.files(), nullptr,
files);
DeserializeStructuredCloneFiles(database, aPreprocessInfo.files(),
/* aForPreprocess */ true, files);
MOZ_ASSERT(files.Length() == 1);
RefPtr<PreprocessHelper>& preprocessHelper = mPreprocessHelpers[0];
preprocessHelper = new PreprocessHelper(0, this);
nsresult rv = preprocessHelper->Init(files);
nsresult rv = preprocessHelper->Init(files[0]);
if (NS_WARN_IF(NS_FAILED(rv))) {
return rv;
}
@ -2743,13 +2768,13 @@ nsresult BackgroundRequestChild::HandlePreprocess(
mRunningPreprocessHelpers++;
mModuleSets.SetLength(1);
mCloneDatas.SetLength(1);
return NS_OK;
}
nsresult BackgroundRequestChild::HandlePreprocess(
const nsTArray<WasmModulePreprocessInfo>& aPreprocessInfos) {
const nsTArray<PreprocessInfo>& aPreprocessInfos) {
AssertIsOnOwningThread();
IDBDatabase* database = mTransaction->Database();
@ -2762,16 +2787,18 @@ nsresult BackgroundRequestChild::HandlePreprocess(
// and has the potential to cause some annoying browser hiccups.
// Consider using a single thread or a very small threadpool.
for (uint32_t index = 0; index < count; index++) {
const WasmModulePreprocessInfo& preprocessInfo = aPreprocessInfos[index];
const PreprocessInfo& preprocessInfo = aPreprocessInfos[index];
nsTArray<StructuredCloneFile> files;
DeserializeStructuredCloneFiles(database, preprocessInfo.files(), nullptr,
files);
DeserializeStructuredCloneFiles(database, preprocessInfo.files(),
/* aForPreprocess */ true, files);
MOZ_ASSERT(files.Length() == 1);
RefPtr<PreprocessHelper>& preprocessHelper = mPreprocessHelpers[index];
preprocessHelper = new PreprocessHelper(index, this);
nsresult rv = preprocessHelper->Init(files);
nsresult rv = preprocessHelper->Init(files[0]);
if (NS_WARN_IF(NS_FAILED(rv))) {
return rv;
}
@ -2784,7 +2811,7 @@ nsresult BackgroundRequestChild::HandlePreprocess(
mRunningPreprocessHelpers++;
}
mModuleSets.SetLength(count);
mCloneDatas.SetLength(count);
mGetAll = true;
@ -2949,51 +2976,44 @@ mozilla::ipc::IPCResult BackgroundRequestChild::RecvPreprocess(
}
nsresult BackgroundRequestChild::PreprocessHelper::Init(
const nsTArray<StructuredCloneFile>& aFiles) {
const StructuredCloneFile& aFile) {
AssertIsOnOwningThread();
MOZ_ASSERT(!aFiles.IsEmpty());
MOZ_ASSERT(aFile.mBlob);
MOZ_ASSERT(aFile.mType == StructuredCloneFile::eStructuredClone);
MOZ_ASSERT(mState == State::Initial);
nsTArray<nsCOMPtr<nsIInputStream>> streams;
for (uint32_t index = 0; index < aFiles.Length(); index++) {
const StructuredCloneFile& bytecodeFile = aFiles[index];
// The stream transport service is used for asynchronous processing. It has a
// threadpool with a high cap of 25 threads. Fortunately, the service can be
// used on workers too.
nsCOMPtr<nsIEventTarget> target =
do_GetService(NS_STREAMTRANSPORTSERVICE_CONTRACTID);
MOZ_ASSERT(target);
MOZ_ASSERT(bytecodeFile.mType == StructuredCloneFile::eWasmBytecode);
MOZ_ASSERT(bytecodeFile.mBlob);
// We use a TaskQueue here in order to be sure that the events are dispatched
// in the correct order. This is not guaranteed in case we use the I/O thread
// directly.
mTaskQueue = new TaskQueue(target.forget());
mTaskQueueEventTarget = mTaskQueue->WrapAsEventTarget();
ErrorResult errorResult;
ErrorResult errorResult;
nsCOMPtr<nsIInputStream> bytecodeStream;
bytecodeFile.mBlob->CreateInputStream(getter_AddRefs(bytecodeStream),
errorResult);
if (NS_WARN_IF(errorResult.Failed())) {
return errorResult.StealNSResult();
}
streams.AppendElement(bytecodeStream);
nsCOMPtr<nsIInputStream> stream;
aFile.mBlob->CreateInputStream(getter_AddRefs(stream), errorResult);
if (NS_WARN_IF(errorResult.Failed())) {
return errorResult.StealNSResult();
}
mStreams = std::move(streams);
mStream = std::move(stream);
mCloneData = MakeUnique<JSStructuredCloneData>(
JS::StructuredCloneScope::DifferentProcessForIndexedDB);
return NS_OK;
}
nsresult BackgroundRequestChild::PreprocessHelper::Dispatch() {
AssertIsOnOwningThread();
if (!mTaskQueue) {
// The stream transport service is used for asynchronous processing. It has
// a threadpool with a high cap of 25 threads. Fortunately, the service can
// be used on workers too.
nsCOMPtr<nsIEventTarget> target =
do_GetService(NS_STREAMTRANSPORTSERVICE_CONTRACTID);
MOZ_ASSERT(target);
// We use a TaskQueue here in order to be sure that the events are
// dispatched in the correct order. This is not guaranteed in case we use
// the I/O thread directly.
mTaskQueue = new TaskQueue(target.forget());
mTaskQueueEventTarget = mTaskQueue->WrapAsEventTarget();
}
MOZ_ASSERT(mState == State::Initial);
nsresult rv = mTaskQueueEventTarget->Dispatch(this, NS_DISPATCH_NORMAL);
if (NS_WARN_IF(NS_FAILED(rv))) {
@ -3003,105 +3023,16 @@ nsresult BackgroundRequestChild::PreprocessHelper::Dispatch() {
return NS_OK;
}
void BackgroundRequestChild::PreprocessHelper::RunOnOwningThread() {
AssertIsOnOwningThread();
if (mActor) {
if (NS_SUCCEEDED(mResultCode)) {
mActor->OnPreprocessFinished(mModuleSetIndex, mModuleSet);
MOZ_ASSERT(mModuleSet.IsEmpty());
} else {
mActor->OnPreprocessFailed(mModuleSetIndex, mResultCode);
}
}
}
class MemUnmap {
uint32_t mSize = 0;
public:
MemUnmap() = default;
explicit MemUnmap(uint32_t aSize) : mSize(aSize) {}
void operator()(uint8_t* aP) {
MOZ_ASSERT(mSize);
PR_MemUnmap(aP, mSize);
}
};
using UniqueMapping = UniquePtr<uint8_t, MemUnmap>;
static UniqueMapping MapFile(PRFileDesc* aFile, PRFileInfo* aInfo) {
if (PR_GetOpenFileInfo(aFile, aInfo) != PR_SUCCESS) {
return nullptr;
}
PRFileMap* map = PR_CreateFileMap(aFile, aInfo->size, PR_PROT_READONLY);
if (!map) {
return nullptr;
}
// PRFileMap objects do not need to be kept alive after the memory has been
// mapped, so unconditionally close the PRFileMap, regardless of whether
// PR_MemMap succeeds.
uint8_t* memory = (uint8_t*)PR_MemMap(map, 0, aInfo->size);
PR_CloseFileMap(map);
return UniqueMapping(memory, MemUnmap(aInfo->size));
}
void BackgroundRequestChild::PreprocessHelper::ProcessCurrentStream() {
nsresult BackgroundRequestChild::PreprocessHelper::Start() {
MOZ_ASSERT(!IsOnOwningThread());
MOZ_ASSERT(!mStreams.IsEmpty());
MOZ_ASSERT(mStream);
MOZ_ASSERT(mState == State::Initial);
// We still don't have the current bytecode FileDesc.
if (!mCurrentBytecodeFileDesc) {
const nsCOMPtr<nsIInputStream>& bytecodeStream = mStreams[0];
MOZ_ASSERT(bytecodeStream);
nsresult rv;
mCurrentBytecodeFileDesc = GetFileDescriptorFromStream(bytecodeStream);
if (!mCurrentBytecodeFileDesc) {
nsresult rv = WaitForStreamReady(bytecodeStream);
if (NS_WARN_IF(NS_FAILED(rv))) {
ContinueWithStatus(rv);
}
return;
}
}
MOZ_ASSERT(mCurrentBytecodeFileDesc);
PRFileInfo bytecodeInfo;
UniqueMapping bytecodeMapping =
MapFile(mCurrentBytecodeFileDesc, &bytecodeInfo);
if (NS_WARN_IF(!bytecodeMapping)) {
ContinueWithStatus(NS_ERROR_FAILURE);
return;
}
RefPtr<JS::WasmModule> module =
JS::DeserializeWasmModule(bytecodeMapping.get(), bytecodeInfo.size);
if (NS_WARN_IF(!module)) {
ContinueWithStatus(NS_ERROR_FAILURE);
return;
}
mModuleSet.AppendElement(module);
mStreams.RemoveElementAt(0);
ContinueWithStatus(NS_OK);
}
nsresult BackgroundRequestChild::PreprocessHelper::WaitForStreamReady(
nsIInputStream* aInputStream) {
MOZ_ASSERT(!IsOnOwningThread());
MOZ_ASSERT(aInputStream);
nsCOMPtr<nsIAsyncFileMetadata> asyncFileMetadata =
do_QueryInterface(aInputStream);
if (asyncFileMetadata) {
nsresult rv =
asyncFileMetadata->AsyncFileMetadataWait(this, mTaskQueueEventTarget);
PRFileDesc* fileDesc = GetFileDescriptorFromStream(mStream);
if (fileDesc) {
rv = ProcessStream();
if (NS_WARN_IF(NS_FAILED(rv))) {
return rv;
}
@ -3109,12 +3040,24 @@ nsresult BackgroundRequestChild::PreprocessHelper::WaitForStreamReady(
return NS_OK;
}
nsCOMPtr<nsIAsyncInputStream> asyncStream = do_QueryInterface(aInputStream);
mState = State::WaitingForStreamReady;
nsCOMPtr<nsIAsyncFileMetadata> asyncFileMetadata = do_QueryInterface(mStream);
if (asyncFileMetadata) {
rv = asyncFileMetadata->AsyncFileMetadataWait(this, mTaskQueueEventTarget);
if (NS_WARN_IF(NS_FAILED(rv))) {
return rv;
}
return NS_OK;
}
nsCOMPtr<nsIAsyncInputStream> asyncStream = do_QueryInterface(mStream);
if (!asyncStream) {
return NS_ERROR_NO_INTERFACE;
}
nsresult rv = asyncStream->AsyncWait(this, 0, 0, mTaskQueueEventTarget);
rv = asyncStream->AsyncWait(this, 0, 0, mTaskQueueEventTarget);
if (NS_WARN_IF(NS_FAILED(rv))) {
return rv;
}
@ -3122,33 +3065,75 @@ nsresult BackgroundRequestChild::PreprocessHelper::WaitForStreamReady(
return NS_OK;
}
void BackgroundRequestChild::PreprocessHelper::ContinueWithStatus(
nsresult aStatus) {
nsresult BackgroundRequestChild::PreprocessHelper::ProcessStream() {
MOZ_ASSERT(!IsOnOwningThread());
MOZ_ASSERT(mStream);
MOZ_ASSERT(mState == State::Initial ||
mState == State::WaitingForStreamReady);
// Let's reset the value for the next operation.
mCurrentBytecodeFileDesc = nullptr;
// We need to get the internal stream (which is an nsFileInputStream) because
// SnappyUncompressInputStream doesn't support reading from async input
// streams.
nsCOMPtr<nsIEventTarget> eventTarget;
nsCOMPtr<mozIIPCBlobInputStream> blobInputStream = do_QueryInterface(mStream);
MOZ_ASSERT(blobInputStream);
if (NS_WARN_IF(NS_FAILED(aStatus))) {
// If the previous operation failed, we don't continue the processing of the
// other streams.
MOZ_ASSERT(mResultCode == NS_OK);
mResultCode = aStatus;
nsCOMPtr<nsIInputStream> internalInputStream =
blobInputStream->GetInternalStream();
MOZ_ASSERT(internalInputStream);
eventTarget = mOwningEventTarget;
} else if (mStreams.IsEmpty()) {
// If all the streams have been processed, we can go back to the owning
// thread.
eventTarget = mOwningEventTarget;
} else {
// Continue the processing.
eventTarget = mTaskQueueEventTarget;
RefPtr<SnappyUncompressInputStream> snappyInputStream =
new SnappyUncompressInputStream(internalInputStream);
nsresult rv;
do {
char buffer[kFileCopyBufferSize];
uint32_t numRead;
rv = snappyInputStream->Read(buffer, sizeof(buffer), &numRead);
if (NS_WARN_IF(NS_FAILED(rv))) {
break;
}
if (!numRead) {
break;
}
if (NS_WARN_IF(!mCloneData->AppendBytes(buffer, numRead))) {
rv = NS_ERROR_OUT_OF_MEMORY;
break;
}
} while (true);
if (NS_WARN_IF(NS_FAILED(rv))) {
return rv;
}
nsresult rv = eventTarget->Dispatch(this, NS_DISPATCH_NORMAL);
Unused << NS_WARN_IF(NS_FAILED(rv));
mState = State::Finishing;
rv = mOwningEventTarget->Dispatch(this, NS_DISPATCH_NORMAL);
if (NS_WARN_IF(NS_FAILED(rv))) {
return rv;
}
return NS_OK;
}
void BackgroundRequestChild::PreprocessHelper::Finish() {
AssertIsOnOwningThread();
if (mActor) {
if (NS_SUCCEEDED(mResultCode)) {
mActor->OnPreprocessFinished(mCloneDataIndex, std::move(mCloneData));
MOZ_ASSERT(!mCloneData);
} else {
mActor->OnPreprocessFailed(mCloneDataIndex, mResultCode);
}
}
mState = State::Completed;
}
NS_IMPL_ISUPPORTS_INHERITED(BackgroundRequestChild::PreprocessHelper,
@ -3157,10 +3142,40 @@ NS_IMPL_ISUPPORTS_INHERITED(BackgroundRequestChild::PreprocessHelper,
NS_IMETHODIMP
BackgroundRequestChild::PreprocessHelper::Run() {
if (IsOnOwningThread()) {
RunOnOwningThread();
} else {
ProcessCurrentStream();
nsresult rv;
switch (mState) {
case State::Initial:
rv = Start();
break;
case State::WaitingForStreamReady:
rv = ProcessStream();
break;
case State::Finishing:
Finish();
return NS_OK;
default:
MOZ_CRASH("Bad state!");
}
if (NS_WARN_IF(NS_FAILED(rv)) && mState != State::Finishing) {
if (NS_SUCCEEDED(mResultCode)) {
mResultCode = rv;
}
// Must set mState before dispatching otherwise we will race with the owning
// thread.
mState = State::Finishing;
if (IsOnOwningThread()) {
Finish();
} else {
MOZ_ALWAYS_SUCCEEDS(
mOwningEventTarget->Dispatch(this, NS_DISPATCH_NORMAL));
}
}
return NS_OK;
@ -3169,42 +3184,25 @@ BackgroundRequestChild::PreprocessHelper::Run() {
NS_IMETHODIMP
BackgroundRequestChild::PreprocessHelper::OnInputStreamReady(
nsIAsyncInputStream* aStream) {
return DataIsReady(aStream);
MOZ_ASSERT(!IsOnOwningThread());
MOZ_ASSERT(mState == State::WaitingForStreamReady);
MOZ_ALWAYS_SUCCEEDS(this->Run());
return NS_OK;
}
NS_IMETHODIMP
BackgroundRequestChild::PreprocessHelper::OnFileMetadataReady(
nsIAsyncFileMetadata* aObject) {
nsCOMPtr<nsIInputStream> stream = do_QueryInterface(aObject);
MOZ_ASSERT(stream, "It was a stream before!");
return DataIsReady(stream);
}
nsresult BackgroundRequestChild::PreprocessHelper::DataIsReady(
nsIInputStream* aStream) {
MOZ_ASSERT(!IsOnOwningThread());
MOZ_ASSERT(aStream);
MOZ_ASSERT(!mStreams.IsEmpty());
MOZ_ASSERT(mState == State::WaitingForStreamReady);
// We still don't have the current bytecode FileDesc.
if (!mCurrentBytecodeFileDesc) {
mCurrentBytecodeFileDesc = GetFileDescriptorFromStream(aStream);
if (!mCurrentBytecodeFileDesc) {
ContinueWithStatus(NS_ERROR_FAILURE);
return NS_OK;
}
MOZ_ALWAYS_SUCCEEDS(this->Run());
// Let's continue with the processing of the current stream.
ProcessCurrentStream();
return NS_OK;
}
MOZ_CRASH("If we have both fileDescs why are we here?");
return NS_OK;
}
nsresult BackgroundRequestChild::PreprocessHelper::Cancel() { return NS_OK; }
/*******************************************************************************
* BackgroundCursorChild
******************************************************************************/
@ -3369,9 +3367,9 @@ void BackgroundCursorChild::HandleResponse(
StructuredCloneReadInfo cloneReadInfo(std::move(response.cloneInfo()));
cloneReadInfo.mDatabase = mTransaction->Database();
DeserializeStructuredCloneFiles(mTransaction->Database(),
response.cloneInfo().files(), nullptr,
cloneReadInfo.mFiles);
DeserializeStructuredCloneFiles(
mTransaction->Database(), response.cloneInfo().files(),
/* aForPreprocess */ false, cloneReadInfo.mFiles);
RefPtr<IDBCursor> newCursor;
@ -3428,9 +3426,9 @@ void BackgroundCursorChild::HandleResponse(
StructuredCloneReadInfo cloneReadInfo(std::move(response.cloneInfo()));
cloneReadInfo.mDatabase = mTransaction->Database();
DeserializeStructuredCloneFiles(mTransaction->Database(),
aResponse.cloneInfo().files(), nullptr,
cloneReadInfo.mFiles);
DeserializeStructuredCloneFiles(
mTransaction->Database(), aResponse.cloneInfo().files(),
/* aForPreprocess */ false, cloneReadInfo.mFiles);
RefPtr<IDBCursor> newCursor;

View File

@ -30,10 +30,6 @@
class nsIEventTarget;
struct nsID;
namespace JS {
struct WasmModule;
} // namespace JS
namespace mozilla {
namespace ipc {
@ -570,9 +566,9 @@ class BackgroundRequestChild final : public BackgroundRequestChildBase,
RefPtr<IDBTransaction> mTransaction;
nsTArray<RefPtr<PreprocessHelper>> mPreprocessHelpers;
nsTArray<nsTArray<RefPtr<JS::WasmModule>>> mModuleSets;
nsTArray<UniquePtr<JSStructuredCloneData>> mCloneDatas;
uint32_t mRunningPreprocessHelpers;
uint32_t mCurrentModuleSetIndex;
uint32_t mCurrentCloneDataIndex;
nsresult mPreprocessResultCode;
bool mGetAll;
@ -586,13 +582,12 @@ class BackgroundRequestChild final : public BackgroundRequestChildBase,
void MaybeSendContinue();
void OnPreprocessFinished(uint32_t aModuleSetIndex,
nsTArray<RefPtr<JS::WasmModule>>& aModuleSet);
void OnPreprocessFinished(uint32_t aCloneDataIndex,
UniquePtr<JSStructuredCloneData> aCloneData);
void OnPreprocessFailed(uint32_t aModuleSetIndex, nsresult aErrorCode);
const nsTArray<RefPtr<JS::WasmModule>>* GetNextModuleSet(
const StructuredCloneReadInfo& aInfo);
UniquePtr<JSStructuredCloneData> GetNextCloneData();
void HandleResponse(nsresult aResponse);
@ -609,10 +604,9 @@ class BackgroundRequestChild final : public BackgroundRequestChildBase,
void HandleResponse(uint64_t aResponse);
nsresult HandlePreprocess(const WasmModulePreprocessInfo& aPreprocessInfo);
nsresult HandlePreprocess(const PreprocessInfo& aPreprocessInfo);
nsresult HandlePreprocess(
const nsTArray<WasmModulePreprocessInfo>& aPreprocessInfos);
nsresult HandlePreprocess(const nsTArray<PreprocessInfo>& aPreprocessInfos);
// IPDL methods are only called by IPDL.
virtual void ActorDestroy(ActorDestroyReason aWhy) override;

View File

@ -8454,10 +8454,9 @@ nsresult DeserializeStructuredCloneFile(FileManager* aFileManager,
return NS_OK;
}
nsresult DeserializeStructuredCloneFiles(FileManager* aFileManager,
const nsAString& aText,
nsTArray<StructuredCloneFile>& aResult,
bool* aHasPreprocessInfo) {
nsresult DeserializeStructuredCloneFiles(
FileManager* aFileManager, const nsAString& aText,
nsTArray<StructuredCloneFile>& aResult) {
MOZ_ASSERT(!IsOnBackgroundThread());
nsCharSeparatedTokenizerTemplate<TokenizerIgnoreNothing> tokenizer(aText,
@ -8475,20 +8474,6 @@ nsresult DeserializeStructuredCloneFiles(FileManager* aFileManager,
if (NS_WARN_IF(NS_FAILED(rv))) {
return rv;
}
if (!aHasPreprocessInfo) {
continue;
}
if (file->mType == StructuredCloneFile::eWasmBytecode) {
*aHasPreprocessInfo = true;
} else if (file->mType == StructuredCloneFile::eWasmCompiled) {
MOZ_ASSERT(aResult.Length() > 1);
MOZ_ASSERT(aResult[aResult.Length() - 2].mType ==
StructuredCloneFile::eWasmBytecode);
*aHasPreprocessInfo = true;
}
}
return NS_OK;
@ -8540,7 +8525,7 @@ nsresult SerializeStructuredCloneFiles(
for (uint32_t index = 0; index < count; index++) {
const StructuredCloneFile& file = aFiles[index];
if (aForPreprocess && file.mType != StructuredCloneFile::eWasmBytecode) {
if (aForPreprocess && file.mType != StructuredCloneFile::eStructuredClone) {
continue;
}
@ -8615,23 +8600,12 @@ nsresult SerializeStructuredCloneFiles(
}
case StructuredCloneFile::eStructuredClone: {
SerializedStructuredCloneFile* file = aResult.AppendElement(fallible);
MOZ_ASSERT(file);
file->file() = null_t();
file->type() = StructuredCloneFile::eStructuredClone;
break;
}
case StructuredCloneFile::eWasmBytecode: {
if (!aForPreprocess) {
SerializedStructuredCloneFile* serializedFile =
aResult.AppendElement(fallible);
MOZ_ASSERT(serializedFile);
SerializedStructuredCloneFile* file = aResult.AppendElement(fallible);
MOZ_ASSERT(file);
serializedFile->file() = null_t();
serializedFile->type() = StructuredCloneFile::eWasmBytecode;
file->file() = null_t();
file->type() = StructuredCloneFile::eStructuredClone;
} else {
RefPtr<FileBlobImpl> impl = new FileBlobImpl(nativeFile);
impl->SetFileId(file.mFileInfo->Id());
@ -8650,7 +8624,7 @@ nsresult SerializeStructuredCloneFiles(
MOZ_ASSERT(serializedFile);
serializedFile->file() = ipcBlob;
serializedFile->type() = StructuredCloneFile::eWasmBytecode;
serializedFile->type() = StructuredCloneFile::eStructuredClone;
aDatabase->MapBlob(ipcBlob, file.mFileInfo);
}
@ -8658,13 +8632,20 @@ nsresult SerializeStructuredCloneFiles(
break;
}
case StructuredCloneFile::eWasmBytecode:
case StructuredCloneFile::eWasmCompiled: {
SerializedStructuredCloneFile* serializedFile =
aResult.AppendElement(fallible);
MOZ_ASSERT(serializedFile);
// Set file() to null, support for storing WebAssembly.Modules has been
// removed in bug 1469395. Support for de-serialization of
// WebAssembly.Modules modules has been removed in bug 1561876. Full
// removal is tracked in bug 1487479.
serializedFile->file() = null_t();
serializedFile->type() = StructuredCloneFile::eWasmCompiled;
serializedFile->type() = file.mType;
break;
}
@ -10358,7 +10339,7 @@ nsresult DatabaseConnection::UpdateRefcountFunction::ProcessValue(
}
nsTArray<StructuredCloneFile> files;
rv = DeserializeStructuredCloneFiles(mFileManager, ids, files, nullptr);
rv = DeserializeStructuredCloneFiles(mFileManager, ids, files);
if (NS_WARN_IF(NS_FAILED(rv))) {
return rv;
}
@ -13776,23 +13757,8 @@ bool TransactionBase::VerifyRequestParams(
}
case StructuredCloneFile::eStructuredClone:
ASSERT_UNLESS_FUZZING();
return false;
case StructuredCloneFile::eWasmBytecode:
case StructuredCloneFile::eWasmCompiled:
if (NS_WARN_IF(
file.type() !=
DatabaseOrMutableFile::TPBackgroundIDBDatabaseFileParent)) {
ASSERT_UNLESS_FUZZING();
return false;
}
if (NS_WARN_IF(!file.get_PBackgroundIDBDatabaseFileParent())) {
ASSERT_UNLESS_FUZZING();
return false;
}
break;
case StructuredCloneFile::eEndGuard:
ASSERT_UNLESS_FUZZING();
return false;
@ -18219,8 +18185,8 @@ nsresult DatabaseOperationBase::GetStructuredCloneReadInfoFromBlob(
}
if (!aFileIds.IsVoid()) {
nsresult rv = DeserializeStructuredCloneFiles(
aFileManager, aFileIds, aInfo->mFiles, &aInfo->mHasPreprocessInfo);
nsresult rv =
DeserializeStructuredCloneFiles(aFileManager, aFileIds, aInfo->mFiles);
if (NS_WARN_IF(NS_FAILED(rv))) {
return rv;
}
@ -18243,8 +18209,7 @@ nsresult DatabaseOperationBase::GetStructuredCloneReadInfoFromExternalBlob(
nsresult rv;
if (!aFileIds.IsVoid()) {
rv = DeserializeStructuredCloneFiles(aFileManager, aFileIds, aInfo->mFiles,
&aInfo->mHasPreprocessInfo);
rv = DeserializeStructuredCloneFiles(aFileManager, aFileIds, aInfo->mFiles);
if (NS_WARN_IF(NS_FAILED(rv))) {
return rv;
}
@ -18259,6 +18224,11 @@ nsresult DatabaseOperationBase::GetStructuredCloneReadInfoFromExternalBlob(
return NS_ERROR_UNEXPECTED;
}
if (IndexedDatabaseManager::PreprocessingEnabled()) {
aInfo->mHasPreprocessInfo = true;
return NS_OK;
}
StructuredCloneFile& file = aInfo->mFiles[index];
MOZ_ASSERT(file.mFileInfo);
MOZ_ASSERT(file.mType == StructuredCloneFile::eStructuredClone);
@ -24132,9 +24102,7 @@ bool ObjectStoreAddOrPutRequestOp::Init(TransactionBase* aTransaction) {
const FileAddInfo& fileAddInfo = fileAddInfos[index];
MOZ_ASSERT(fileAddInfo.type() == StructuredCloneFile::eBlob ||
fileAddInfo.type() == StructuredCloneFile::eMutableFile ||
fileAddInfo.type() == StructuredCloneFile::eWasmBytecode ||
fileAddInfo.type() == StructuredCloneFile::eWasmCompiled);
fileAddInfo.type() == StructuredCloneFile::eMutableFile);
const DatabaseOrMutableFile& file = fileAddInfo.file();
@ -24172,22 +24140,6 @@ bool ObjectStoreAddOrPutRequestOp::Init(TransactionBase* aTransaction) {
break;
}
case StructuredCloneFile::eWasmBytecode:
case StructuredCloneFile::eWasmCompiled: {
MOZ_ASSERT(file.type() ==
DatabaseOrMutableFile::TPBackgroundIDBDatabaseFileParent);
storedFileInfo->mFileActor = static_cast<DatabaseFile*>(
file.get_PBackgroundIDBDatabaseFileParent());
MOZ_ASSERT(storedFileInfo->mFileActor);
storedFileInfo->mFileInfo = storedFileInfo->mFileActor->GetFileInfo();
MOZ_ASSERT(storedFileInfo->mFileInfo);
storedFileInfo->mType = fileAddInfo.type();
break;
}
default:
MOZ_CRASH("Should never get here!");
}
@ -24651,8 +24603,8 @@ void MoveData<SerializedStructuredCloneReadInfo>(
}
template <>
void MoveData<WasmModulePreprocessInfo>(StructuredCloneReadInfo& aInfo,
WasmModulePreprocessInfo& aResult) {}
void MoveData<PreprocessInfo>(StructuredCloneReadInfo& aInfo,
PreprocessInfo& aResult) {}
template <bool aForPreprocess, typename T>
nsresult ObjectStoreGetRequestOp::ConvertResponse(
@ -24761,7 +24713,7 @@ nsresult ObjectStoreGetRequestOp::GetPreprocessParams(
if (mGetAll) {
aParams = ObjectStoreGetAllPreprocessParams();
FallibleTArray<WasmModulePreprocessInfo> falliblePreprocessInfos;
FallibleTArray<PreprocessInfo> falliblePreprocessInfos;
if (NS_WARN_IF(!falliblePreprocessInfos.SetLength(mPreprocessInfoCount,
fallible))) {
return NS_ERROR_OUT_OF_MEMORY;
@ -24781,7 +24733,7 @@ nsresult ObjectStoreGetRequestOp::GetPreprocessParams(
}
}
nsTArray<WasmModulePreprocessInfo>& preprocessInfos =
nsTArray<PreprocessInfo>& preprocessInfos =
aParams.get_ObjectStoreGetAllPreprocessParams().preprocessInfos();
falliblePreprocessInfos.SwapElements(preprocessInfos);
@ -24791,7 +24743,7 @@ nsresult ObjectStoreGetRequestOp::GetPreprocessParams(
aParams = ObjectStoreGetPreprocessParams();
WasmModulePreprocessInfo& preprocessInfo =
PreprocessInfo& preprocessInfo =
aParams.get_ObjectStoreGetPreprocessParams().preprocessInfo();
nsresult rv = ConvertResponse<true>(mResponse[0], preprocessInfo);

View File

@ -689,26 +689,16 @@ class ValueDeserializationHelper {
MOZ_ASSERT(aFile.mType == StructuredCloneFile::eWasmBytecode);
MOZ_ASSERT(!aFile.mBlob);
// If we don't have a WasmModule, we are probably using it for an index
// creation, but Wasm module can't be used in index creation, so just make a
// dummy object.
if (!aFile.mWasmModule) {
JS::Rooted<JSObject*> obj(aCx, JS_NewPlainObject(aCx));
// Just create a plain object here, support for de-serialization of
// WebAssembly.Modules has been removed in bug 1561876. Full removal is
// tracked in bug 1487479.
if (NS_WARN_IF(!obj)) {
return false;
}
aResult.set(obj);
return true;
}
JS::Rooted<JSObject*> moduleObj(aCx, aFile.mWasmModule->createObject(aCx));
if (NS_WARN_IF(!moduleObj)) {
JS::Rooted<JSObject*> obj(aCx, JS_NewPlainObject(aCx));
if (NS_WARN_IF(!obj)) {
return false;
}
aResult.set(moduleObj);
aResult.set(obj);
return true;
}
};
@ -804,7 +794,7 @@ JSObject* CopyingStructuredCloneReadCallback(JSContext* aCx,
MOZ_ASSERT(aTag != SCTAG_DOM_FILE_WITHOUT_LASTMODIFIEDDATE);
if (aTag == SCTAG_DOM_BLOB || aTag == SCTAG_DOM_FILE ||
aTag == SCTAG_DOM_MUTABLEFILE || aTag == SCTAG_DOM_WASM) {
aTag == SCTAG_DOM_MUTABLEFILE) {
auto* cloneInfo =
static_cast<IDBObjectStore::StructuredCloneInfo*>(aClosure);
@ -854,28 +844,14 @@ JSObject* CopyingStructuredCloneReadCallback(JSContext* aCx,
return result;
}
if (aTag == SCTAG_DOM_MUTABLEFILE) {
MOZ_ASSERT(file.mType == StructuredCloneFile::eMutableFile);
MOZ_ASSERT(file.mType == StructuredCloneFile::eMutableFile);
JS::Rooted<JS::Value> wrappedMutableFile(aCx);
if (NS_WARN_IF(!ToJSValue(aCx, file.mMutableFile, &wrappedMutableFile))) {
return nullptr;
}
result.set(&wrappedMutableFile.toObject());
return result;
}
MOZ_ASSERT(file.mType == StructuredCloneFile::eWasmBytecode);
JS::Rooted<JSObject*> wrappedModule(aCx,
file.mWasmModule->createObject(aCx));
if (NS_WARN_IF(!wrappedModule)) {
JS::Rooted<JS::Value> wrappedMutableFile(aCx);
if (NS_WARN_IF(!ToJSValue(aCx, file.mMutableFile, &wrappedMutableFile))) {
return nullptr;
}
result.set(wrappedModule);
result.set(&wrappedMutableFile.toObject());
return result;
}

View File

@ -11,10 +11,6 @@
#include "nsCOMPtr.h"
#include "nsTArray.h"
namespace JS {
struct WasmModule;
} // namespace JS
namespace mozilla {
namespace dom {
@ -39,7 +35,6 @@ struct StructuredCloneFile {
RefPtr<Blob> mBlob;
RefPtr<IDBMutableFile> mMutableFile;
RefPtr<JS::WasmModule> mWasmModule;
RefPtr<FileInfo> mFileInfo;
FileType mType;

View File

@ -123,6 +123,7 @@ const char kPrefMaxSerilizedMsgSize[] =
IDB_PREF_BRANCH_ROOT "maxSerializedMsgSize";
const char kPrefErrorEventToSelfError[] =
IDB_PREF_BRANCH_ROOT "errorEventToSelfError";
const char kPreprocessingPref[] = IDB_PREF_BRANCH_ROOT "preprocessing";
#define IDB_PREF_LOGGING_BRANCH_ROOT IDB_PREF_BRANCH_ROOT "logging."
@ -147,6 +148,7 @@ Atomic<bool> gFileHandleEnabled(false);
Atomic<bool> gPrefErrorEventToSelfError(false);
Atomic<int32_t> gDataThresholdBytes(0);
Atomic<int32_t> gMaxSerializedMsgSize(0);
Atomic<bool> gPreprocessingEnabled(false);
void AtomicBoolPrefChangedCallback(const char* aPrefName,
Atomic<bool>* aClosure) {
@ -281,6 +283,10 @@ nsresult IndexedDatabaseManager::Init() {
Preferences::RegisterCallbackAndCall(MaxSerializedMsgSizePrefChangeCallback,
kPrefMaxSerilizedMsgSize);
Preferences::RegisterCallbackAndCall(AtomicBoolPrefChangedCallback,
kPreprocessingPref,
&gPreprocessingEnabled);
nsAutoCString acceptLang;
Preferences::GetLocalizedCString("intl.accept_languages", acceptLang);
@ -336,6 +342,9 @@ void IndexedDatabaseManager::Destroy() {
Preferences::UnregisterCallback(MaxSerializedMsgSizePrefChangeCallback,
kPrefMaxSerilizedMsgSize);
Preferences::UnregisterCallback(AtomicBoolPrefChangedCallback,
kPreprocessingPref, &gPreprocessingEnabled);
delete this;
}
@ -617,6 +626,15 @@ uint32_t IndexedDatabaseManager::MaxSerializedMsgSize() {
return gMaxSerializedMsgSize;
}
// static
bool IndexedDatabaseManager::PreprocessingEnabled() {
MOZ_ASSERT(gDBManager,
"PreprocessingEnabled() called before indexedDB has been "
"initialized!");
return gPreprocessingEnabled;
}
void IndexedDatabaseManager::ClearBackgroundActor() {
MOZ_ASSERT(NS_IsMainThread());

View File

@ -95,6 +95,8 @@ class IndexedDatabaseManager final {
static uint32_t MaxSerializedMsgSize();
static bool PreprocessingEnabled();
void ClearBackgroundActor();
already_AddRefed<FileManager> GetFileManager(PersistenceType aPersistenceType,

View File

@ -109,19 +109,19 @@ union RequestResponse
IndexCountResponse;
};
struct WasmModulePreprocessInfo
struct PreprocessInfo
{
SerializedStructuredCloneFile[] files;
};
struct ObjectStoreGetPreprocessParams
{
WasmModulePreprocessInfo preprocessInfo;
PreprocessInfo preprocessInfo;
};
struct ObjectStoreGetAllPreprocessParams
{
WasmModulePreprocessInfo[] preprocessInfos;
PreprocessInfo[] preprocessInfos;
};
union PreprocessParams

View File

@ -16,11 +16,26 @@ function* testSteps() {
const viewData = { key: 1, view: getRandomView(100000) };
for (let external of [false, true]) {
if (external) {
info("Setting data threshold pref");
const tests = [
{
external: false,
preprocessing: false,
},
{
external: true,
preprocessing: false,
},
{
external: true,
preprocessing: true,
},
];
for (let test of tests) {
if (test.external) {
if (this.window) {
info("Setting data threshold pref");
SpecialPowers.pushPrefEnv(
{ set: [["dom.indexedDB.dataThreshold", 0]] },
continueToNextStep
@ -31,6 +46,20 @@ function* testSteps() {
}
}
if (test.preprocessing) {
if (this.window) {
info("Setting preprocessing pref");
SpecialPowers.pushPrefEnv(
{ set: [["dom.indexedDB.preprocessing", true]] },
continueToNextStep
);
yield undefined;
} else {
enablePreprocessing();
}
}
info("Opening database");
let request = indexedDB.open(name);
@ -88,7 +117,7 @@ function* testSteps() {
getCurrentUsage(grabFileUsageAndContinueHandler);
let fileUsage = yield undefined;
if (external) {
if (test.external) {
ok(fileUsage > 0, "File usage is not zero");
} else {
ok(fileUsage == 0, "File usage is zero");
@ -100,6 +129,21 @@ function* testSteps() {
request.onerror = errorHandler;
request.onsuccess = continueToNextStepSync;
yield undefined;
if (this.window) {
info("Resetting prefs");
SpecialPowers.popPrefEnv(continueToNextStep);
yield undefined;
} else {
if (test.external) {
resetDataThreshold();
}
if (test.preprocessing) {
resetPreprocessing();
}
}
}
finishTest();

View File

@ -0,0 +1,58 @@
/**
* Any copyright is dedicated to the Public Domain.
* http://creativecommons.org/publicdomain/zero/1.0/
*/
var testGenerator = testSteps();
function* testSteps() {
const name = "test_wasm_recompile.js";
const objectStoreName = "Wasm";
const wasmData = { key: 1 };
// The goal of this test is to prove that stored wasm is never deserialized.
info("Installing profile");
clearAllDatabases(continueToNextStepSync);
yield undefined;
// The profile was created with a mythical build (buildId: 20180309213541,
// cpuId: X64=0x2). It contains one stored wasm module (file id 1 - bytecode
// and file id 2 - compiled/machine code). The file create_db.js in the
// package was run locally (specifically it was temporarily added to
// xpcshell-parent-process.ini and then executed:
// mach xpcshell-test dom/indexedDB/test/unit/create_db.js
installPackagedProfile("wasm_get_values_profile");
info("Opening database");
let request = indexedDB.open(name);
request.onerror = errorHandler;
request.onupgradeneeded = unexpectedSuccessHandler;
request.onsuccess = continueToNextStepSync;
yield undefined;
// success
let db = request.result;
db.onerror = errorHandler;
info("Getting wasm");
request = db
.transaction([objectStoreName])
.objectStore(objectStoreName)
.get(wasmData.key);
request.onsuccess = continueToNextStepSync;
yield undefined;
info("Verifying wasm");
let isWasmModule = request.result instanceof WebAssembly.Module;
ok(!isWasmModule, "Object is not wasm module");
finishTest();
yield undefined;
}

View File

@ -1,140 +0,0 @@
/**
* Any copyright is dedicated to the Public Domain.
* http://creativecommons.org/publicdomain/zero/1.0/
*/
var testGenerator = testSteps();
function* testSteps() {
const name = "test_wasm_recompile.js";
const objectStoreName = "Wasm";
const wasmData = { key: 1, wasm: null };
// The goal of this test is to prove that wasm is recompiled and the on-disk
// copy updated.
if (!isWasmSupported()) {
finishTest();
yield undefined;
}
getWasmBinary(
'(module (func $f (result i32) (i32.const 42)) (func (export "run") (result i32) (call $f)))'
);
let binary = yield undefined;
wasmData.wasm = getWasmModule(binary);
info("Installing profile");
clearAllDatabases(continueToNextStepSync);
yield undefined;
// The profile was created with a mythical build (buildId: 20180309213541,
// cpuId: X64=0x2). It contains one stored wasm module (file id 1 - bytecode
// and file id 2 - compiled/machine code). The file create_db.js in the
// package was run locally (specifically it was temporarily added to
// xpcshell-parent-process.ini and then executed:
// mach xpcshell-test dom/indexedDB/test/unit/create_db.js
installPackagedProfile("wasm_recompile_profile");
let filesDir = getChromeFilesDir();
let file = filesDir.clone();
file.append("2");
info("Reading out contents of compiled blob");
File.createFromNsIFile(file).then(grabEventAndContinueHandler);
let domFile = yield undefined;
let fileReader = new FileReader();
fileReader.onload = continueToNextStepSync;
fileReader.readAsArrayBuffer(domFile);
yield undefined;
let compiledBuffer = fileReader.result;
info("Opening database");
let request = indexedDB.open(name);
request.onerror = errorHandler;
request.onupgradeneeded = unexpectedSuccessHandler;
request.onsuccess = continueToNextStepSync;
yield undefined;
// success
let db = request.result;
db.onerror = errorHandler;
info("Getting wasm");
request = db
.transaction([objectStoreName])
.objectStore(objectStoreName)
.get(wasmData.key);
request.onsuccess = continueToNextStepSync;
yield undefined;
info("Verifying wasm module");
verifyWasmModule(request.result, wasmData.wasm);
yield undefined;
info("Reading out contents of new compiled blob");
File.createFromNsIFile(file).then(grabEventAndContinueHandler);
domFile = yield undefined;
fileReader = new FileReader();
fileReader.onload = continueToNextStepSync;
fileReader.readAsArrayBuffer(domFile);
yield undefined;
let newCompiledBuffer = fileReader.result;
info("Verifying that re-storing of re-compiled code has been disabled");
ok(compareBuffers(newCompiledBuffer, compiledBuffer), "Blobs don't differ");
info("Getting wasm again");
request = db
.transaction([objectStoreName])
.objectStore(objectStoreName)
.get(wasmData.key);
request.onsuccess = continueToNextStepSync;
yield undefined;
info("Verifying wasm module");
verifyWasmModule(request.result, wasmData.wasm);
yield undefined;
info("Reading out contents of new compiled blob again");
File.createFromNsIFile(file).then(grabEventAndContinueHandler);
domFile = yield undefined;
fileReader = new FileReader();
fileReader.onload = continueToNextStepSync;
fileReader.readAsArrayBuffer(domFile);
yield undefined;
let newCompiledBuffer2 = fileReader.result;
info("Verifying blob didn't change");
ok(
compareBuffers(newCompiledBuffer2, newCompiledBuffer),
"Blob didn't change"
);
finishTest();
yield undefined;
}

View File

@ -494,16 +494,6 @@ function verifyView(view1, view2) {
continueToNextStep();
}
function verifyWasmModule(module1, module2) {
// We assume the given modules have no imports and export a single function
// named 'run'.
var instance1 = new WebAssembly.Instance(module1);
var instance2 = new WebAssembly.Instance(module2);
is(instance1.exports.run(), instance2.exports.run(), "same run() result");
continueToNextStep();
}
function grabFileUsageAndContinueHandler(request) {
testGenerator.next(request.result.fileUsage);
}
@ -531,11 +521,26 @@ function setDataThreshold(threshold) {
SpecialPowers.setIntPref("dom.indexedDB.dataThreshold", threshold);
}
function resetDataThreshold() {
info("Clearing data threshold pref");
SpecialPowers.clearUserPref("dom.indexedDB.dataThreshold");
}
function setMaxSerializedMsgSize(aSize) {
info("Setting maximal size of a serialized message to " + aSize);
SpecialPowers.setIntPref("dom.indexedDB.maxSerializedMsgSize", aSize);
}
function enablePreprocessing() {
info("Setting preprocessing pref");
SpecialPowers.setBoolPref("dom.indexedDB.preprocessing", true);
}
function resetPreprocessing() {
info("Clearing preprocessing pref");
SpecialPowers.clearUserPref("dom.indexedDB.preprocessing");
}
function getPrincipal(url) {
let uri = Services.io.newURI(url);
return Services.scriptSecurityManager.createContentPrincipal(uri, {});

View File

@ -26,7 +26,7 @@ support-files =
schema23upgrade_profile.zip
snappyUpgrade_profile.zip
storagePersistentUpgrade_profile.zip
wasm_recompile_profile.zip
wasm_get_values_profile.zip
xpcshell-shared.ini
[include:xpcshell-shared.ini]
@ -66,5 +66,6 @@ skip-if = os == "android"
# bug 951017: intermittent failure on Android x86 emulator
skip-if = os == "android" && processor == "x86"
[test_unexpectedDirectory.js]
[test_view_put_get_values.js]
[test_wasm_get_values.js]
[test_wasm_put_get_values.js]
[test_wasm_recompile.js]

View File

@ -3018,5 +3018,3 @@ void MediaFormatReader::OnFirstDemuxFailed(TrackInfo::TrackType aType,
} // namespace mozilla
#undef NS_DispatchToMainThread
#undef LOGV
#undef LOG

View File

@ -172,6 +172,10 @@ class nsMainThreadPtrHolder<
namespace mozilla {
#ifdef LOG
# undef LOG
#endif
LazyLogModule gMediaManagerLog("MediaManager");
#define LOG(...) MOZ_LOG(gMediaManagerLog, LogLevel::Debug, (__VA_ARGS__))
@ -4615,6 +4619,4 @@ void GetUserMediaWindowListener::NotifyChrome() {
}));
}
#undef LOG
} // namespace mozilla

View File

@ -40,6 +40,10 @@
#include "nsProxyRelease.h"
#include "nsTArray.h"
#ifdef LOG
# undef LOG
#endif
mozilla::LazyLogModule gMediaRecorderLog("MediaRecorder");
#define LOG(type, msg) MOZ_LOG(gMediaRecorderLog, type, msg)
@ -197,15 +201,72 @@ NS_IMPL_RELEASE_INHERITED(MediaRecorder, DOMEventTargetHelper)
* Therefore, the reference dependency in gecko is:
* ShutdownBlocker -> Session <-> MediaRecorder, note that there is a cycle
* reference between Session and MediaRecorder.
* 2) A Session is destroyed after MediaRecorder::Stop has been called _and_ all
* encoded media data has been passed to OnDataAvailable handler. 3)
* MediaRecorder::Stop is called by user or the document is going to inactive or
* invisible.
* 2) A Session is destroyed in DestroyRunnable after MediaRecorder::Stop being
* called _and_ all encoded media data been passed to OnDataAvailable handler.
* 3) MediaRecorder::Stop is called by user or the document is going to
* inactive or invisible.
*/
class MediaRecorder::Session : public PrincipalChangeObserver<MediaStreamTrack>,
public DOMMediaStream::TrackListener {
NS_INLINE_DECL_THREADSAFE_REFCOUNTING(Session)
// Main thread task.
// Create a blob event and send back to client.
class PushBlobRunnable : public Runnable, public MutableBlobStorageCallback {
public:
// We need to always declare refcounting because
// MutableBlobStorageCallback has pure-virtual refcounting.
NS_DECL_ISUPPORTS_INHERITED
// aDestroyRunnable can be null. If it's not, it will be dispatched after
// the PushBlobRunnable::Run().
PushBlobRunnable(Session* aSession, Runnable* aDestroyRunnable)
: Runnable("dom::MediaRecorder::Session::PushBlobRunnable"),
mSession(aSession),
mDestroyRunnable(aDestroyRunnable) {}
NS_IMETHOD Run() override {
LOG(LogLevel::Debug, ("Session.PushBlobRunnable s=(%p)", mSession.get()));
MOZ_ASSERT(NS_IsMainThread());
mSession->GetBlobWhenReady(this);
return NS_OK;
}
void BlobStoreCompleted(MutableBlobStorage* aBlobStorage, Blob* aBlob,
nsresult aRv) override {
RefPtr<MediaRecorder> recorder = mSession->mRecorder;
if (!recorder) {
return;
}
if (NS_FAILED(aRv)) {
mSession->DoSessionEndTask(aRv);
return;
}
nsresult rv = recorder->CreateAndDispatchBlobEvent(aBlob);
if (NS_FAILED(rv)) {
mSession->DoSessionEndTask(aRv);
}
if (mDestroyRunnable &&
NS_FAILED(NS_DispatchToMainThread(mDestroyRunnable.forget()))) {
MOZ_ASSERT(false, "NS_DispatchToMainThread failed");
}
}
private:
~PushBlobRunnable() = default;
RefPtr<Session> mSession;
// The generation of the blob is async. In order to avoid dispatching the
// DestroyRunnable before pushing the blob event, we store the runnable
// here.
RefPtr<Runnable> mDestroyRunnable;
};
class StoreEncodedBufferRunnable final : public Runnable {
RefPtr<Session> mSession;
nsTArray<nsTArray<uint8_t>> mBuffer;
@ -238,6 +299,31 @@ class MediaRecorder::Session : public PrincipalChangeObserver<MediaStreamTrack>,
}
};
// Notify encoder error, run in main thread task. (Bug 1095381)
class EncoderErrorNotifierRunnable : public Runnable {
public:
explicit EncoderErrorNotifierRunnable(Session* aSession)
: Runnable("dom::MediaRecorder::Session::EncoderErrorNotifierRunnable"),
mSession(aSession) {}
NS_IMETHOD Run() override {
LOG(LogLevel::Debug,
("Session.ErrorNotifyRunnable s=(%p)", mSession.get()));
MOZ_ASSERT(NS_IsMainThread());
RefPtr<MediaRecorder> recorder = mSession->mRecorder;
if (!recorder) {
return NS_OK;
}
recorder->NotifyError(NS_ERROR_UNEXPECTED);
return NS_OK;
}
private:
RefPtr<Session> mSession;
};
// Fire a named event, run in main thread task.
class DispatchEventRunnable : public Runnable {
public:
@ -264,6 +350,75 @@ class MediaRecorder::Session : public PrincipalChangeObserver<MediaStreamTrack>,
nsString mEventName;
};
// Main thread task.
// To delete RecordingSession object.
class DestroyRunnable : public Runnable {
public:
explicit DestroyRunnable(Session* aSession)
: Runnable("dom::MediaRecorder::Session::DestroyRunnable"),
mSession(aSession) {}
explicit DestroyRunnable(already_AddRefed<Session> aSession)
: Runnable("dom::MediaRecorder::Session::DestroyRunnable"),
mSession(aSession) {}
NS_IMETHOD Run() override {
LOG(LogLevel::Debug,
("Session.DestroyRunnable session refcnt = (%d) s=(%p)",
static_cast<int>(mSession->mRefCnt), mSession.get()));
MOZ_ASSERT(NS_IsMainThread() && mSession);
RefPtr<MediaRecorder> recorder = mSession->mRecorder;
if (!recorder) {
return NS_OK;
}
// SourceMediaStream is ended, and send out TRACK_EVENT_END notification.
// Read Thread will be terminate soon.
// We need to switch MediaRecorder to "Stop" state first to make sure
// MediaRecorder is not associated with this Session anymore, then, it's
// safe to delete this Session.
// Also avoid to run if this session already call stop before
if (mSession->mRunningState.isOk() &&
mSession->mRunningState.unwrap() != RunningState::Stopping &&
mSession->mRunningState.unwrap() != RunningState::Stopped) {
recorder->StopForSessionDestruction();
if (NS_FAILED(NS_DispatchToMainThread(
new DestroyRunnable(mSession.forget())))) {
MOZ_ASSERT(false, "NS_DispatchToMainThread failed");
}
return NS_OK;
}
if (mSession->mRunningState.isOk()) {
mSession->mRunningState = RunningState::Stopped;
}
// Dispatch stop event and clear MIME type.
mSession->mMimeType = NS_LITERAL_STRING("");
recorder->SetMimeType(mSession->mMimeType);
recorder->DispatchSimpleEvent(NS_LITERAL_STRING("stop"));
RefPtr<Session> session = mSession.forget();
session->Shutdown()->Then(
GetCurrentThreadSerialEventTarget(), __func__,
[session]() {
gSessions.RemoveEntry(session);
if (gSessions.Count() == 0 && gMediaRecorderShutdownBlocker) {
// All sessions finished before shutdown, no need to keep the
// blocker.
RefPtr<nsIAsyncShutdownClient> barrier = GetShutdownBarrier();
barrier->RemoveBlocker(gMediaRecorderShutdownBlocker);
gMediaRecorderShutdownBlocker = nullptr;
}
},
[]() { MOZ_CRASH("Not reached"); });
return NS_OK;
}
private:
// Call mSession::Release automatically while DestroyRunnable be destroy.
RefPtr<Session> mSession;
};
class EncoderListener : public MediaEncoderListener {
public:
EncoderListener(TaskQueue* aEncoderThread, Session* aSession)
@ -307,21 +462,22 @@ class MediaRecorder::Session : public PrincipalChangeObserver<MediaStreamTrack>,
RefPtr<Session> mSession;
};
friend class EncoderErrorNotifierRunnable;
friend class PushBlobRunnable;
friend class DestroyRunnable;
public:
Session(MediaRecorder* aRecorder, uint32_t aTimeSlice)
: mRecorder(aRecorder),
mMediaStreamReady(false),
mMainThread(mRecorder->GetOwner()->EventTargetFor(TaskCategory::Other)),
mTimeSlice(aTimeSlice),
mStartTime(TimeStamp::Now()),
mRunningState(RunningState::Idling) {
MOZ_ASSERT(NS_IsMainThread());
aRecorder->GetMimeType(mMimeType);
mMaxMemory = Preferences::GetUint("media.recorder.max_memory",
MAX_ALLOW_MEMORY_BUFFER);
mLastBlobTimeStamp = mStartTime;
Telemetry::ScalarAdd(Telemetry::ScalarID::MEDIARECORDER_RECORDING_COUNT, 1);
mLastBlobTimeStamp = TimeStamp::Now();
}
void PrincipalChanged(MediaStreamTrack* aTrack) override {
@ -433,7 +589,7 @@ class MediaRecorder::Session : public PrincipalChangeObserver<MediaStreamTrack>,
if (mRunningState.isOk() &&
mRunningState.unwrap() == RunningState::Idling) {
LOG(LogLevel::Debug, ("Session.Stop Explicit end task %p", this));
// End the Session directly if there is no encoder.
// End the Session directly if there is no ExtractRunnable.
DoSessionEndTask(NS_OK);
} else if (mRunningState.isOk() &&
(mRunningState.unwrap() == RunningState::Starting ||
@ -470,26 +626,17 @@ class MediaRecorder::Session : public PrincipalChangeObserver<MediaStreamTrack>,
return NS_OK;
}
void RequestData() {
nsresult RequestData() {
LOG(LogLevel::Debug, ("Session.RequestData"));
MOZ_ASSERT(NS_IsMainThread());
GatherBlob()->Then(
mMainThread, __func__,
[this, self = RefPtr<Session>(this)](
const BlobPromise::ResolveOrRejectValue& aResult) {
if (aResult.IsReject()) {
LOG(LogLevel::Warning, ("GatherBlob failed for RequestData()"));
DoSessionEndTask(aResult.RejectValue());
return;
}
if (NS_FAILED(
NS_DispatchToMainThread(new PushBlobRunnable(this, nullptr)))) {
MOZ_ASSERT(false, "RequestData NS_DispatchToMainThread failed");
return NS_ERROR_FAILURE;
}
nsresult rv =
mRecorder->CreateAndDispatchBlobEvent(aResult.ResolveValue());
if (NS_FAILED(rv)) {
DoSessionEndTask(NS_OK);
}
});
return NS_OK;
}
void MaybeCreateMutableBlobStorage() {
@ -499,46 +646,14 @@ class MediaRecorder::Session : public PrincipalChangeObserver<MediaStreamTrack>,
}
}
static const bool IsExclusive = true;
using BlobPromise =
MozPromise<nsMainThreadPtrHandle<Blob>, nsresult, IsExclusive>;
class BlobStorer : public MutableBlobStorageCallback {
MozPromiseHolder<BlobPromise> mHolder;
virtual ~BlobStorer() = default;
public:
BlobStorer() = default;
NS_INLINE_DECL_THREADSAFE_REFCOUNTING(BlobStorer, override)
void BlobStoreCompleted(MutableBlobStorage*, Blob* aBlob,
nsresult aRv) override {
MOZ_ASSERT(NS_IsMainThread());
if (NS_FAILED(aRv)) {
mHolder.Reject(aRv, __func__);
} else {
mHolder.Resolve(nsMainThreadPtrHandle<Blob>(
MakeAndAddRef<nsMainThreadPtrHolder<Blob>>(
"BlobStorer::ResolveBlob", aBlob)),
__func__);
}
}
RefPtr<BlobPromise> Promise() { return mHolder.Ensure(__func__); }
};
// Stops gathering data into the current blob and resolves when the current
// blob is available. Future data will be stored in a new blob.
RefPtr<BlobPromise> GatherBlob() {
void GetBlobWhenReady(MutableBlobStorageCallback* aCallback) {
MOZ_ASSERT(NS_IsMainThread());
RefPtr<BlobStorer> storer = MakeAndAddRef<BlobStorer>();
MaybeCreateMutableBlobStorage();
mMutableBlobStorage->GetBlobWhenReady(
mRecorder->GetOwner(), NS_ConvertUTF16toUTF8(mMimeType), storer);
mMutableBlobStorage = nullptr;
return storer->Promise();
MaybeCreateMutableBlobStorage();
mMutableBlobStorage->GetBlobWhenReady(mRecorder->GetParentObject(),
NS_ConvertUTF16toUTF8(mMimeType),
aCallback);
mMutableBlobStorage = nullptr;
}
RefPtr<SizeOfPromise> SizeOfExcludingThis(
@ -563,16 +678,17 @@ class MediaRecorder::Session : public PrincipalChangeObserver<MediaStreamTrack>,
}
private:
// Only DestroyRunnable is allowed to delete Session object on main thread.
virtual ~Session() {
MOZ_ASSERT(NS_IsMainThread());
MOZ_ASSERT(mShutdownPromise);
LOG(LogLevel::Debug, ("Session.~Session (%p)", this));
}
// Pull encoded media data from MediaEncoder and put into MutableBlobStorage.
// If the bool aForceFlush is true, we will force a dispatch of a blob to
// main thread.
void Extract(bool aForceFlush) {
// Destroy this session object in the end of this function.
// If the bool aForceFlush is true, we will force to dispatch a
// PushBlobRunnable to main thread.
void Extract(bool aForceFlush, Runnable* aDestroyRunnable) {
MOZ_ASSERT(mEncoderThread->IsCurrentThreadIn());
LOG(LogLevel::Debug, ("Session.Extract %p", this));
@ -600,24 +716,16 @@ class MediaRecorder::Session : public PrincipalChangeObserver<MediaStreamTrack>,
pushBlob = true;
}
if (pushBlob) {
mLastBlobTimeStamp = TimeStamp::Now();
InvokeAsync(mMainThread, this, __func__, &Session::GatherBlob)
->Then(mMainThread, __func__,
[this, self = RefPtr<Session>(this)](
const BlobPromise::ResolveOrRejectValue& aResult) {
if (aResult.IsReject()) {
LOG(LogLevel::Warning,
("GatherBlob failed for pushing blob"));
DoSessionEndTask(aResult.RejectValue());
return;
}
nsresult rv = mRecorder->CreateAndDispatchBlobEvent(
aResult.ResolveValue());
if (NS_FAILED(rv)) {
DoSessionEndTask(NS_OK);
}
});
if (NS_FAILED(NS_DispatchToMainThread(
new PushBlobRunnable(this, aDestroyRunnable)))) {
MOZ_ASSERT(false, "NS_DispatchToMainThread PushBlobRunnable failed");
} else {
mLastBlobTimeStamp = TimeStamp::Now();
}
} else if (aDestroyRunnable) {
if (NS_FAILED(NS_DispatchToMainThread(aDestroyRunnable))) {
MOZ_ASSERT(false, "NS_DispatchToMainThread DestroyRunnable failed");
}
}
}
@ -671,7 +779,7 @@ class MediaRecorder::Session : public PrincipalChangeObserver<MediaStreamTrack>,
// When MediaRecorder supports multiple tracks, we should set up a single
// MediaInputPort from the input stream, and let main thread check
// track principals async later.
nsPIDOMWindowInner* window = mRecorder->GetOwner();
nsPIDOMWindowInner* window = mRecorder->GetParentObject();
Document* document = window ? window->GetExtantDoc() : nullptr;
nsContentUtils::ReportToConsole(nsIScriptError::errorFlag,
NS_LITERAL_CSTRING("Media"), document,
@ -874,18 +982,12 @@ class MediaRecorder::Session : public PrincipalChangeObserver<MediaStreamTrack>,
// appropriate video keyframe interval defined in milliseconds.
mEncoder->SetVideoKeyFrameInterval(mTimeSlice);
// Set mRunningState to Running so that DoSessionEndTask will
// Set mRunningState to Running so that ExtractRunnable/DestroyRunnable will
// take the responsibility to end the session.
mRunningState = RunningState::Starting;
}
// This is the task that will stop recording per spec:
// - Stop gathering data (this is inherently async)
// - Set state to "inactive"
// - Fire an error event, if NS_FAILED(rv)
// - Discard blob data if rv is NS_ERROR_DOM_SECURITY_ERR
// - Fire a Blob event
// - Fire an event named stop
// application should get blob and onstop event
void DoSessionEndTask(nsresult rv) {
MOZ_ASSERT(NS_IsMainThread());
if (mRunningState.isErr()) {
@ -899,11 +1001,11 @@ class MediaRecorder::Session : public PrincipalChangeObserver<MediaStreamTrack>,
return;
}
bool needsStartEvent = false;
if (mRunningState.isOk() &&
(mRunningState.unwrap() == RunningState::Idling ||
mRunningState.unwrap() == RunningState::Starting)) {
needsStartEvent = true;
NS_DispatchToMainThread(
new DispatchEventRunnable(this, NS_LITERAL_STRING("start")));
}
if (rv == NS_OK) {
@ -912,100 +1014,77 @@ class MediaRecorder::Session : public PrincipalChangeObserver<MediaStreamTrack>,
mRunningState = Err(rv);
}
GatherBlob()
->Then(mMainThread, __func__,
[this, self = RefPtr<Session>(this), rv, needsStartEvent](
const BlobPromise::ResolveOrRejectValue& aResult) {
if (mRecorder->mSessions.LastElement() == this) {
// Set state to inactive, but only if the recorder is not
// controlled by another session already.
mRecorder->ForceInactive();
}
if (NS_FAILED(rv)) {
mRecorder->ForceInactive();
NS_DispatchToMainThread(NewRunnableMethod<nsresult>(
"dom::MediaRecorder::NotifyError", mRecorder,
&MediaRecorder::NotifyError, rv));
}
if (needsStartEvent) {
mRecorder->DispatchSimpleEvent(NS_LITERAL_STRING("start"));
}
RefPtr<Runnable> destroyRunnable = new DestroyRunnable(this);
// If there was an error, Fire the appropriate one
if (NS_FAILED(rv)) {
mRecorder->NotifyError(rv);
}
// Fire a blob event named dataavailable
RefPtr<Blob> blob;
if (rv == NS_ERROR_DOM_SECURITY_ERR || aResult.IsReject()) {
// In case of SecurityError, the blob data must be discarded.
// We create a new empty one and throw the blob with its data
// away.
// In case we failed to gather blob data, we create an empty
// memory blob instead.
blob = Blob::CreateEmptyBlob(mRecorder->GetParentObject(),
mMimeType);
} else {
blob = aResult.ResolveValue();
}
if (NS_FAILED(mRecorder->CreateAndDispatchBlobEvent(blob))) {
// Failed to dispatch blob event. That's unexpected. It's
// probably all right to fire an error event if we haven't
// already.
if (NS_SUCCEEDED(rv)) {
mRecorder->NotifyError(NS_ERROR_FAILURE);
}
}
// Dispatch stop event and clear MIME type.
mMimeType = NS_LITERAL_STRING("");
mRecorder->SetMimeType(mMimeType);
// Fire an event named stop
mRecorder->DispatchSimpleEvent(NS_LITERAL_STRING("stop"));
// And finally, Shutdown and destroy the Session
return Shutdown();
})
->Then(mMainThread, __func__, [this, self = RefPtr<Session>(this)] {
gSessions.RemoveEntry(this);
if (gSessions.Count() == 0 && gMediaRecorderShutdownBlocker) {
// All sessions finished before shutdown, no need to keep the
// blocker.
RefPtr<nsIAsyncShutdownClient> barrier = GetShutdownBarrier();
barrier->RemoveBlocker(gMediaRecorderShutdownBlocker);
gMediaRecorderShutdownBlocker = nullptr;
}
});
if (rv != NS_ERROR_DOM_SECURITY_ERR) {
// Don't push a blob if there was a security error.
if (NS_FAILED(NS_DispatchToMainThread(
new PushBlobRunnable(this, destroyRunnable)))) {
MOZ_ASSERT(false, "NS_DispatchToMainThread PushBlobRunnable failed");
}
} else {
if (NS_FAILED(NS_DispatchToMainThread(destroyRunnable))) {
MOZ_ASSERT(false, "NS_DispatchToMainThread DestroyRunnable failed");
}
}
}
void MediaEncoderInitialized() {
MOZ_ASSERT(mEncoderThread->IsCurrentThreadIn());
NS_DispatchToMainThread(NewRunnableFrom([self = RefPtr<Session>(this), this,
mime = mEncoder->MimeType()]() {
if (mRunningState.isErr()) {
// Pull encoded metadata from MediaEncoder
nsTArray<nsTArray<uint8_t>> encodedBuf;
nsString mime;
nsresult rv = mEncoder->GetEncodedMetadata(&encodedBuf, mime);
if (NS_FAILED(rv)) {
MOZ_ASSERT(false);
return;
}
// Append pulled data into cache buffer.
NS_DispatchToMainThread(
new StoreEncodedBufferRunnable(this, std::move(encodedBuf)));
RefPtr<Session> self = this;
NS_DispatchToMainThread(NewRunnableFrom([self, mime]() {
if (!self->mRecorder) {
MOZ_ASSERT_UNREACHABLE("Recorder should be live");
return NS_OK;
}
mMimeType = mime;
mRecorder->SetMimeType(mime);
auto state = mRunningState.unwrap();
if (state == RunningState::Starting || state == RunningState::Stopping) {
if (state == RunningState::Starting) {
// We set it to Running in the runnable since we can only assign
// mRunningState on main thread. We set it before running the start
// event runnable since that dispatches synchronously (and may cause
// js calls to methods depending on mRunningState).
mRunningState = RunningState::Running;
if (self->mRunningState.isOk()) {
auto state = self->mRunningState.unwrap();
if (state == RunningState::Starting ||
state == RunningState::Stopping) {
if (state == RunningState::Starting) {
// We set it to Running in the runnable since we can only assign
// mRunningState on main thread. We set it before running the start
// event runnable since that dispatches synchronously (and may cause
// js calls to methods depending on mRunningState).
self->mRunningState = RunningState::Running;
}
self->mMimeType = mime;
self->mRecorder->SetMimeType(self->mMimeType);
auto startEvent = MakeRefPtr<DispatchEventRunnable>(
self, NS_LITERAL_STRING("start"));
startEvent->Run();
}
mRecorder->DispatchSimpleEvent(NS_LITERAL_STRING("start"));
}
return NS_OK;
}));
Extract(false);
}
void MediaEncoderDataAvailable() {
MOZ_ASSERT(mEncoderThread->IsCurrentThreadIn());
Extract(false);
Extract(false, nullptr);
}
void MediaEncoderError() {
@ -1019,9 +1098,12 @@ class MediaRecorder::Session : public PrincipalChangeObserver<MediaStreamTrack>,
MOZ_ASSERT(mEncoderThread->IsCurrentThreadIn());
MOZ_ASSERT(mEncoder->IsShutdown());
mMainThread->Dispatch(NewRunnableMethod<nsresult>(
"MediaRecorder::Session::MediaEncoderShutdown->DoSessionEndTask", this,
&Session::DoSessionEndTask, NS_OK));
// For the stop event. Let the creation of the blob dispatch this
// runnable.
RefPtr<Runnable> destroyRunnable = new DestroyRunnable(this);
// Forces the last blob even if it's not time for it yet.
Extract(true, destroyRunnable);
// Clean up.
mEncoderListener->Forget();
@ -1038,13 +1120,6 @@ class MediaRecorder::Session : public PrincipalChangeObserver<MediaStreamTrack>,
return mShutdownPromise;
}
// This is a coarse calculation and does not reflect the duration of the
// final recording for reasons such as pauses. However it allows us an
// idea of how long people are running their recorders for.
TimeDuration timeDelta = TimeStamp::Now() - mStartTime;
Telemetry::Accumulate(Telemetry::MEDIA_RECORDER_RECORDING_DURATION,
timeDelta.ToSeconds());
mShutdownPromise = ShutdownPromise::CreateAndResolve(true, __func__);
RefPtr<Session> self = this;
@ -1081,16 +1156,19 @@ class MediaRecorder::Session : public PrincipalChangeObserver<MediaStreamTrack>,
}
// Break the cycle reference between Session and MediaRecorder.
mShutdownPromise = mShutdownPromise->Then(
GetCurrentThreadSerialEventTarget(), __func__,
[self]() {
self->mRecorder->RemoveSession(self);
return ShutdownPromise::CreateAndResolve(true, __func__);
},
[]() {
MOZ_ASSERT_UNREACHABLE("Unexpected reject");
return ShutdownPromise::CreateAndReject(false, __func__);
});
if (mRecorder) {
mShutdownPromise = mShutdownPromise->Then(
GetCurrentThreadSerialEventTarget(), __func__,
[self]() {
self->mRecorder->RemoveSession(self);
self->mRecorder = nullptr;
return ShutdownPromise::CreateAndResolve(true, __func__);
},
[]() {
MOZ_ASSERT_UNREACHABLE("Unexpected reject");
return ShutdownPromise::CreateAndReject(false, __func__);
});
}
if (mEncoderThread) {
RefPtr<TaskQueue>& encoderThread = mEncoderThread;
@ -1115,8 +1193,9 @@ class MediaRecorder::Session : public PrincipalChangeObserver<MediaStreamTrack>,
Stopped, // Session has stopped without any error
};
// Our associated MediaRecorder.
const RefPtr<MediaRecorder> mRecorder;
// Hold reference to MediaRecorder that ensure MediaRecorder is alive
// if there is an active session. Access ONLY on main thread.
RefPtr<MediaRecorder> mRecorder;
// Stream currently recorded.
RefPtr<DOMMediaStream> mMediaStream;
@ -1128,8 +1207,6 @@ class MediaRecorder::Session : public PrincipalChangeObserver<MediaStreamTrack>,
// set.
nsTArray<RefPtr<MediaStreamTrack>> mMediaStreamTracks;
// Main thread used for MozPromise operations.
const RefPtr<nsISerialEventTarget> mMainThread;
// Runnable thread for reading data from MediaEncoder.
RefPtr<TaskQueue> mEncoderThread;
// MediaEncoder pipeline.
@ -1149,14 +1226,14 @@ class MediaRecorder::Session : public PrincipalChangeObserver<MediaStreamTrack>,
// The interval of passing encoded data from MutableBlobStorage to
// onDataAvailable handler.
const uint32_t mTimeSlice;
// The time this session started, for telemetry.
const TimeStamp mStartTime;
// The session's current main thread state. The error type gets set when
// ending a recording with an error. An NS_OK error is invalid.
// The session's current main thread state. The error type gets set when
// ending a recording with an error. An NS_OK error is invalid.
// Main thread only.
Result<RunningState, nsresult> mRunningState;
};
NS_IMPL_ISUPPORTS_INHERITED0(MediaRecorder::Session::PushBlobRunnable, Runnable)
MediaRecorder::~MediaRecorder() {
LOG(LogLevel::Debug, ("~MediaRecorder (%p)", this));
UnRegisterActivityObserver();
@ -1251,6 +1328,8 @@ void MediaRecorder::Start(const Optional<uint32_t>& aTimeSlice,
mSessions.AppendElement();
mSessions.LastElement() = new Session(this, timeSlice);
mSessions.LastElement()->Start();
mStartTime = TimeStamp::Now();
Telemetry::ScalarAdd(Telemetry::ScalarID::MEDIARECORDER_RECORDING_COUNT, 1);
}
void MediaRecorder::Stop(ErrorResult& aResult) {
@ -1312,7 +1391,10 @@ void MediaRecorder::RequestData(ErrorResult& aResult) {
return;
}
MOZ_ASSERT(mSessions.Length() > 0);
mSessions.LastElement()->RequestData();
nsresult rv = mSessions.LastElement()->RequestData();
if (NS_FAILED(rv)) {
NotifyError(rv);
}
}
JSObject* MediaRecorder::WrapObject(JSContext* aCx,
@ -1601,6 +1683,22 @@ void MediaRecorder::ForceInactive() {
mState = RecordingState::Inactive;
}
void MediaRecorder::StopForSessionDestruction() {
LOG(LogLevel::Debug, ("MediaRecorder.StopForSessionDestruction %p", this));
MediaRecorderReporter::RemoveMediaRecorder(this);
// We do not perform a mState != RecordingState::Recording) check here as
// we may already be inactive due to ForceInactive().
mState = RecordingState::Inactive;
MOZ_ASSERT(mSessions.Length() > 0);
mSessions.LastElement()->Stop();
// This is a coarse calculation and does not reflect the duration of the
// final recording for reasons such as pauses. However it allows us an idea
// of how long people are running their recorders for.
TimeDuration timeDelta = TimeStamp::Now() - mStartTime;
Telemetry::Accumulate(Telemetry::MEDIA_RECORDER_RECORDING_DURATION,
timeDelta.ToSeconds());
}
void MediaRecorder::InitializeDomExceptions() {
mSecurityDomException = DOMException::Create(NS_ERROR_DOM_SECURITY_ERR);
mUnknownDomException = DOMException::Create(NS_ERROR_DOM_UNKNOWN_ERR);
@ -1639,5 +1737,3 @@ StaticRefPtr<MediaRecorderReporter> MediaRecorderReporter::sUniqueInstance;
} // namespace dom
} // namespace mozilla
#undef LOG

View File

@ -61,6 +61,8 @@ class MediaRecorder final : public DOMEventTargetHelper,
JSObject* WrapObject(JSContext* aCx,
JS::Handle<JSObject*> aGivenProto) override;
nsPIDOMWindowInner* GetParentObject() { return GetOwner(); }
NS_DECL_ISUPPORTS_INHERITED
NS_DECL_CYCLE_COLLECTION_CLASS_INHERITED(MediaRecorder, DOMEventTargetHelper)
@ -175,6 +177,8 @@ class MediaRecorder final : public DOMEventTargetHelper,
uint32_t mVideoBitsPerSecond;
uint32_t mBitsPerSecond;
TimeStamp mStartTime;
// DOMExceptions that are created early and possibly thrown in NotifyError.
// Creating them early allows us to capture the JS stack for which cannot be
// done at the time the error event is fired.

View File

@ -7,7 +7,7 @@
#define ContainerWriter_h_
#include "nsTArray.h"
#include "EncodedFrame.h"
#include "EncodedFrameContainer.h"
#include "TrackMetadataBase.h"
namespace mozilla {
@ -26,26 +26,23 @@ class ContainerWriter {
enum { END_OF_STREAM = 1 << 0 };
/**
* Writes encoded track data from aData into the internal stream of container
* writer. aFlags is used to signal the impl of different conditions
* such as END_OF_STREAM. Each impl may handle different flags, and should be
* documented accordingly. Currently, WriteEncodedTrack doesn't support
* explicit track specification, though each impl may provide logic to
* allocate frames into different tracks.
* Writes encoded track data from aBuffer to a packet, and insert this packet
* into the internal stream of container writer. aDuration is the playback
* duration of this packet in number of samples. aFlags is true with
* END_OF_STREAM if this is the last packet of track.
* Currently, WriteEncodedTrack doesn't support multiple tracks.
*/
virtual nsresult WriteEncodedTrack(
const nsTArray<RefPtr<EncodedFrame>>& aData, uint32_t aFlags = 0) = 0;
virtual nsresult WriteEncodedTrack(const EncodedFrameContainer& aData,
uint32_t aFlags = 0) = 0;
/**
* Stores the metadata for all given tracks to the muxer.
*
* This method checks the integrity of aMetadata.
* If the metadata isn't well formatted, this method returns NS_ERROR_FAILURE.
* If the metadata is well formatted, it stores the metadata and returns
* Set the metadata pointer into the muxer.
* This function will check the integrity of aMetadata.
* If the metadata isn't well formatted, this function will return
* NS_ERROR_FAILURE to the caller; otherwise it saves the pointer to mMetadata
* and returns
*/
virtual nsresult SetMetadata(
const nsTArray<RefPtr<TrackMetadataBase>>& aMetadata) = 0;
virtual nsresult SetMetadata(TrackMetadataBase* aMetadata) = 0;
/**
* Indicate if the writer has finished to output data
@ -62,7 +59,7 @@ class ContainerWriter {
* even it is not full, and copy these container data to a buffer for
* aOutputBufs to append.
*/
virtual nsresult GetContainerData(nsTArray<nsTArray<uint8_t>>* aOutputBufs,
virtual nsresult GetContainerData(nsTArray<nsTArray<uint8_t> >* aOutputBufs,
uint32_t aFlags = 0) = 0;
protected:

View File

@ -1,71 +0,0 @@
/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*-*/
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this file,
* You can obtain one at http://mozilla.org/MPL/2.0/. */
#ifndef EncodedFrame_h_
#define EncodedFrame_h_
#include "nsISupportsImpl.h"
#include "VideoUtils.h"
namespace mozilla {
// Represent an encoded frame emitted by an encoder
class EncodedFrame final {
  NS_INLINE_DECL_THREADSAFE_REFCOUNTING(EncodedFrame)
 public:
  EncodedFrame() = default;

  // Identifies the codec payload currently held in mFrameData.
  enum FrameType {
    VP8_I_FRAME,       // VP8 intraframe
    VP8_P_FRAME,       // VP8 predicted frame
    OPUS_AUDIO_FRAME,  // Opus audio frame
    UNKNOWN            // FrameType not set
  };

  // Takes ownership of aData's contents; aData receives the old buffer.
  void SwapInFrameData(nsTArray<uint8_t>& aData) {
    mFrameData.SwapElements(aData);
  }

  // Hands the payload over to aData and resets the frame type to UNKNOWN.
  // Fails if the payload was already swapped out (type is UNKNOWN).
  nsresult SwapOutFrameData(nsTArray<uint8_t>& aData) {
    if (mFrameType == UNKNOWN) {
      return NS_ERROR_FAILURE;
    }
    // Reset this frame type to UNKNOWN once the data is swapped out.
    mFrameData.SwapElements(aData);
    mFrameType = UNKNOWN;
    return NS_OK;
  }

  const nsTArray<uint8_t>& GetFrameData() const { return mFrameData; }

  // Timestamp in microseconds
  uint64_t mTime = 0;
  // The playback duration of this packet. The unit is determined by the use
  // case. For VP8 the unit should be microseconds. For opus this is the number
  // of samples.
  uint64_t mDuration = 0;
  // Represent what is in the FrameData
  FrameType mFrameType = UNKNOWN;

  // End of this frame on the track timeline, in microseconds.
  uint64_t GetEndTime() const {
    // Defend against untested types. This assert can be removed but we want
    // to make sure other types are correctly accounted for.
    MOZ_ASSERT(mFrameType == OPUS_AUDIO_FRAME || mFrameType == VP8_I_FRAME ||
               mFrameType == VP8_P_FRAME);
    if (mFrameType == OPUS_AUDIO_FRAME) {
      // Opus duration is a sample count at 48 kHz; convert to microseconds.
      // See bug 1356054 for discussion around standardization of time units
      // (can remove videoutils import when this goes)
      return mTime + FramesToUsecs(mDuration, 48000).value();
    }
    return mTime + mDuration;
  }

 private:
  // Private destructor, to discourage deletion outside of Release():
  ~EncodedFrame() = default;

  // Encoded data
  nsTArray<uint8_t> mFrameData;
};
} // namespace mozilla
#endif // EncodedFrame_h_

View File

@ -0,0 +1,97 @@
/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*-*/
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this file,
* You can obtain one at http://mozilla.org/MPL/2.0/. */
#ifndef EncodedFrameContainer_H_
#define EncodedFrameContainer_H_
#include "nsTArray.h"
namespace mozilla {
class EncodedFrame;
/*
 * This container is used to carry video or audio encoded data from the encoder
 * to the muxer. The media data object is created by the encoder and recycled
 * by the destructor. Only audio or video encoded data may be stored in
 * EncodedData.
 */
class EncodedFrameContainer {
 public:
  // Append one encoded frame; the container takes a strong reference.
  void AppendEncodedFrame(EncodedFrame* aEncodedFrame) {
    mEncodedFrames.AppendElement(aEncodedFrame);
  }

  // Read-only access to all frames appended so far.
  const nsTArray<RefPtr<EncodedFrame>>& GetEncodedFrames() const {
    return mEncodedFrames;
  }

 private:
  // The encoded audio or video packets, in append order.
  nsTArray<RefPtr<EncodedFrame>> mEncodedFrames;
};
// Represent one encoded frame
class EncodedFrame final {
  NS_INLINE_DECL_THREADSAFE_REFCOUNTING(EncodedFrame)
 public:
  EncodedFrame() = default;

  // Identifies the codec payload carried in mFrameData. Order matters: the
  // numeric values are part of the interface.
  enum FrameType {
    VP8_I_FRAME,       // VP8 intraframe
    VP8_P_FRAME,       // VP8 predicted frame
    OPUS_AUDIO_FRAME,  // Opus audio frame
    VORBIS_AUDIO_FRAME,
    AVC_I_FRAME,
    AVC_P_FRAME,
    AVC_B_FRAME,
    AVC_CSD,  // AVC codec specific data
    AAC_AUDIO_FRAME,
    AAC_CSD,  // AAC codec specific data
    AMR_AUDIO_CSD,
    AMR_AUDIO_FRAME,
    EVRC_AUDIO_CSD,
    EVRC_AUDIO_FRAME,
    UNKNOWN  // FrameType not set
  };

  // Takes ownership of aData's buffer; aData receives the previous one.
  void SwapInFrameData(nsTArray<uint8_t>& aData) {
    mFrameData.SwapElements(aData);
  }

  // Hands the payload over to aData and resets the type to UNKNOWN. Fails if
  // the payload was already swapped out (type is UNKNOWN).
  nsresult SwapOutFrameData(nsTArray<uint8_t>& aData) {
    if (mFrameType == UNKNOWN) {
      return NS_ERROR_FAILURE;
    }
    // Reset this frame type to UNKNOWN once the data is swapped out.
    mFrameData.SwapElements(aData);
    mFrameType = UNKNOWN;
    return NS_OK;
  }

  const nsTArray<uint8_t>& GetFrameData() const { return mFrameData; }

  uint64_t GetTimeStamp() const { return mTimeStamp; }
  void SetTimeStamp(uint64_t aTimeStamp) { mTimeStamp = aTimeStamp; }

  uint64_t GetDuration() const { return mDuration; }
  void SetDuration(uint64_t aDuration) { mDuration = aDuration; }

  FrameType GetFrameType() const { return mFrameType; }
  void SetFrameType(FrameType aFrameType) { mFrameType = aFrameType; }

 private:
  // Private destructor, to discourage deletion outside of Release():
  ~EncodedFrame() = default;

  // Encoded data
  nsTArray<uint8_t> mFrameData;
  uint64_t mTimeStamp = 0;
  // The playback duration of this packet in number of samples
  uint64_t mDuration = 0;
  // Represent what is in the FrameData
  FrameType mFrameType = UNKNOWN;
};
} // namespace mozilla
#endif

View File

@ -25,7 +25,6 @@
#include "mozilla/StaticPtr.h"
#include "mozilla/TaskQueue.h"
#include "mozilla/Unused.h"
#include "Muxer.h"
#include "nsIPrincipal.h"
#include "nsMimeTypes.h"
#include "nsThreadUtils.h"
@ -39,6 +38,10 @@
# include "WebMWriter.h"
#endif
#ifdef LOG
# undef LOG
#endif
mozilla::LazyLogModule gMediaEncoderLog("MediaEncoder");
#define LOG(type, msg) MOZ_LOG(gMediaEncoderLog, type, msg)
@ -395,13 +398,14 @@ MediaEncoder::MediaEncoder(TaskQueue* aEncoderThread,
VideoTrackEncoder* aVideoEncoder,
TrackRate aTrackRate, const nsAString& aMIMEType)
: mEncoderThread(aEncoderThread),
mMuxer(MakeUnique<Muxer>(std::move(aWriter))),
mWriter(std::move(aWriter)),
mAudioEncoder(aAudioEncoder),
mVideoEncoder(aVideoEncoder),
mEncoderListener(MakeAndAddRef<EncoderListener>(mEncoderThread, this)),
mStartTime(TimeStamp::Now()),
mMIMEType(aMIMEType),
mInitialized(false),
mMetadataEncoded(false),
mCompleted(false),
mError(false),
mCanceled(false),
@ -428,14 +432,7 @@ MediaEncoder::MediaEncoder(TaskQueue* aEncoderThread,
}
}
MediaEncoder::~MediaEncoder() {
MOZ_ASSERT(mListeners.IsEmpty());
MOZ_ASSERT(!mAudioTrack);
MOZ_ASSERT(!mVideoTrack);
MOZ_ASSERT(!mAudioNode);
MOZ_ASSERT(!mInputPort);
MOZ_ASSERT(!mPipeStream);
}
MediaEncoder::~MediaEncoder() { MOZ_ASSERT(mListeners.IsEmpty()); }
void MediaEncoder::RunOnGraph(already_AddRefed<Runnable> aRunnable) {
MediaStreamGraphImpl* graph;
@ -654,7 +651,7 @@ already_AddRefed<MediaEncoder> MediaEncoder::CreateEncoder(
driftCompensator, aTrackRate, FrameDroppingMode::DISALLOW);
}
}
writer = MakeUnique<WebMWriter>();
writer = MakeUnique<WebMWriter>(aTrackTypes);
mimeType = NS_LITERAL_STRING(VIDEO_WEBM);
} else if (MediaEncoder::IsWebMEncoderEnabled() &&
aMIMEType.EqualsLiteral(AUDIO_WEBM) &&
@ -675,7 +672,7 @@ already_AddRefed<MediaEncoder> MediaEncoder::CreateEncoder(
} else {
mimeType = NS_LITERAL_STRING(AUDIO_WEBM);
}
writer = MakeUnique<WebMWriter>();
writer = MakeUnique<WebMWriter>(aTrackTypes);
}
#endif // MOZ_WEBM_ENCODER
else if (MediaDecoder::IsOggEnabled() && MediaDecoder::IsOpusEnabled() &&
@ -702,7 +699,7 @@ already_AddRefed<MediaEncoder> MediaEncoder::CreateEncoder(
driftCompensator, aTrackRate, FrameDroppingMode::DISALLOW);
}
}
writer = MakeUnique<WebMWriter>();
writer = MakeUnique<WebMWriter>(aTrackTypes);
mimeType = NS_LITERAL_STRING(VIDEO_WEBM);
}
#endif // MOZ_WEBM_ENCODER
@ -740,78 +737,122 @@ already_AddRefed<MediaEncoder> MediaEncoder::CreateEncoder(
audioEncoder, videoEncoder, aTrackRate, mimeType);
}
nsresult MediaEncoder::GetEncodedMetadata(
    nsTArray<nsTArray<uint8_t>>* aOutputBufs, nsAString& aMIMEType) {
  AUTO_PROFILER_LABEL("MediaEncoder::GetEncodedMetadata", OTHER);
  MOZ_ASSERT(mEncoderThread->IsCurrentThreadIn());

  // Metadata may be produced exactly once, and only while the encoder is
  // initialized and not shut down. Any violation is a caller bug.
  if (mShutdown || !mInitialized || mMetadataEncoded) {
    MOZ_ASSERT(false);
    return NS_ERROR_FAILURE;
  }

  aMIMEType = mMIMEType;

  LOG(LogLevel::Verbose,
      ("GetEncodedMetadata TimeStamp = %f", GetEncodeTimeStamp()));

  // Push each active track's metadata to the writer before asking for the
  // container header.
  if (mAudioEncoder) {
    if (!mAudioEncoder->IsInitialized()) {
      LOG(LogLevel::Error,
          ("GetEncodedMetadata Audio encoder not initialized"));
      MOZ_ASSERT(false);
      return NS_ERROR_FAILURE;
    }
    const nsresult rv = CopyMetadataToMuxer(mAudioEncoder);
    if (NS_FAILED(rv)) {
      LOG(LogLevel::Error, ("Failed to Set Audio Metadata"));
      SetError();
      return rv;
    }
  }

  if (mVideoEncoder) {
    if (!mVideoEncoder->IsInitialized()) {
      LOG(LogLevel::Error,
          ("GetEncodedMetadata Video encoder not initialized"));
      MOZ_ASSERT(false);
      return NS_ERROR_FAILURE;
    }
    const nsresult rv = CopyMetadataToMuxer(mVideoEncoder.get());
    if (NS_FAILED(rv)) {
      LOG(LogLevel::Error, ("Failed to Set Video Metadata"));
      SetError();
      return rv;
    }
  }

  // All track metadata is in; emit the container header into aOutputBufs.
  const nsresult rv =
      mWriter->GetContainerData(aOutputBufs, ContainerWriter::GET_HEADER);
  if (NS_FAILED(rv)) {
    LOG(LogLevel::Error, ("Writer fail to generate header!"));
    SetError();
    return rv;
  }
  LOG(LogLevel::Verbose,
      ("Finish GetEncodedMetadata TimeStamp = %f", GetEncodeTimeStamp()));

  mMetadataEncoded = true;
  return NS_OK;
}
nsresult MediaEncoder::GetEncodedData(
nsTArray<nsTArray<uint8_t>>* aOutputBufs) {
AUTO_PROFILER_LABEL("MediaEncoder::GetEncodedData", OTHER);
MOZ_ASSERT(mEncoderThread->IsCurrentThreadIn());
MOZ_ASSERT(mInitialized);
MOZ_ASSERT_IF(mAudioEncoder, mAudioEncoder->IsInitialized());
MOZ_ASSERT_IF(mVideoEncoder, mVideoEncoder->IsInitialized());
if (!mMetadataEncoded) {
MOZ_ASSERT(false);
return NS_ERROR_FAILURE;
}
nsresult rv;
LOG(LogLevel::Verbose,
("GetEncodedData TimeStamp = %f", GetEncodeTimeStamp()));
EncodedFrameContainer encodedData;
if (mMuxer->NeedsMetadata()) {
nsTArray<RefPtr<TrackMetadataBase>> meta;
if (mAudioEncoder && !*meta.AppendElement(mAudioEncoder->GetMetadata())) {
LOG(LogLevel::Error, ("Audio metadata is null"));
SetError();
return NS_ERROR_ABORT;
}
if (mVideoEncoder && !*meta.AppendElement(mVideoEncoder->GetMetadata())) {
LOG(LogLevel::Error, ("Video metadata is null"));
SetError();
return NS_ERROR_ABORT;
}
rv = mMuxer->SetMetadata(meta);
if (mVideoEncoder) {
// We're most likely to actually wait for a video frame, so do that first
// to minimize capture offset/lipsync issues.
rv = WriteEncodedDataToMuxer(mVideoEncoder);
LOG(LogLevel::Verbose,
("Video encoded TimeStamp = %f", GetEncodeTimeStamp()));
if (NS_FAILED(rv)) {
LOG(LogLevel::Error, ("SetMetadata failed"));
SetError();
LOG(LogLevel::Warning, ("Failed to write encoded video data to muxer"));
return rv;
}
}
// First, feed encoded data from encoders to muxer.
if (mVideoEncoder && !mVideoEncoder->IsEncodingComplete()) {
nsTArray<RefPtr<EncodedFrame>> videoFrames;
rv = mVideoEncoder->GetEncodedTrack(videoFrames);
if (mAudioEncoder) {
rv = WriteEncodedDataToMuxer(mAudioEncoder);
LOG(LogLevel::Verbose,
("Audio encoded TimeStamp = %f", GetEncodeTimeStamp()));
if (NS_FAILED(rv)) {
// Encoding might be canceled.
LOG(LogLevel::Error, ("Failed to get encoded data from video encoder."));
LOG(LogLevel::Warning, ("Failed to write encoded audio data to muxer"));
return rv;
}
for (const RefPtr<EncodedFrame>& frame : videoFrames) {
mMuxer->AddEncodedVideoFrame(frame);
}
if (mVideoEncoder->IsEncodingComplete()) {
mMuxer->VideoEndOfStream();
}
}
if (mAudioEncoder && !mAudioEncoder->IsEncodingComplete()) {
nsTArray<RefPtr<EncodedFrame>> audioFrames;
rv = mAudioEncoder->GetEncodedTrack(audioFrames);
if (NS_FAILED(rv)) {
// Encoding might be canceled.
LOG(LogLevel::Error, ("Failed to get encoded data from audio encoder."));
return rv;
}
for (const RefPtr<EncodedFrame>& frame : audioFrames) {
mMuxer->AddEncodedAudioFrame(frame);
}
if (mAudioEncoder->IsEncodingComplete()) {
mMuxer->AudioEndOfStream();
}
}
// Second, get data from muxer. This will do the actual muxing.
rv = mMuxer->GetData(aOutputBufs);
if (mMuxer->IsFinished()) {
// In audio only or video only case, let unavailable track's flag to be
// true.
bool isAudioCompleted = !mAudioEncoder || mAudioEncoder->IsEncodingComplete();
bool isVideoCompleted = !mVideoEncoder || mVideoEncoder->IsEncodingComplete();
rv = mWriter->GetContainerData(
aOutputBufs,
isAudioCompleted && isVideoCompleted ? ContainerWriter::FLUSH_NEEDED : 0);
if (mWriter->IsWritingComplete()) {
mCompleted = true;
Shutdown();
}
@ -819,9 +860,7 @@ nsresult MediaEncoder::GetEncodedData(
LOG(LogLevel::Verbose,
("END GetEncodedData TimeStamp=%f "
"mCompleted=%d, aComplete=%d, vComplete=%d",
GetEncodeTimeStamp(), mCompleted,
!mAudioEncoder || mAudioEncoder->IsEncodingComplete(),
!mVideoEncoder || mVideoEncoder->IsEncodingComplete()));
GetEncodeTimeStamp(), mCompleted, isAudioCompleted, isVideoCompleted));
return rv;
}
@ -865,6 +904,64 @@ void MediaEncoder::Shutdown() {
}
}
nsresult MediaEncoder::WriteEncodedDataToMuxer(TrackEncoder* aTrackEncoder) {
  AUTO_PROFILER_LABEL("MediaEncoder::WriteEncodedDataToMuxer", OTHER);
  MOZ_ASSERT(mEncoderThread->IsCurrentThreadIn());

  if (!aTrackEncoder) {
    NS_ERROR("No track encoder to get data from");
    return NS_ERROR_FAILURE;
  }
  if (aTrackEncoder->IsEncodingComplete()) {
    // Nothing more will come from this track; nothing to write.
    return NS_OK;
  }

  EncodedFrameContainer frames;
  nsresult rv = aTrackEncoder->GetEncodedTrack(frames);
  if (NS_FAILED(rv)) {
    // Encoding might be canceled.
    LOG(LogLevel::Error, ("Failed to get encoded data from encoder."));
    SetError();
    return rv;
  }

  // NOTE(review): IsEncodingComplete() is re-queried here rather than reusing
  // the earlier result — presumably GetEncodedTrack() can finish the track,
  // in which case END_OF_STREAM must be signaled. Confirm against
  // TrackEncoder.
  const uint32_t flags =
      aTrackEncoder->IsEncodingComplete() ? ContainerWriter::END_OF_STREAM : 0;
  rv = mWriter->WriteEncodedTrack(frames, flags);
  if (NS_FAILED(rv)) {
    LOG(LogLevel::Error,
        ("Failed to write encoded track to the media container."));
    SetError();
  }
  return rv;
}
nsresult MediaEncoder::CopyMetadataToMuxer(TrackEncoder* aTrackEncoder) {
  AUTO_PROFILER_LABEL("MediaEncoder::CopyMetadataToMuxer", OTHER);
  MOZ_ASSERT(mEncoderThread->IsCurrentThreadIn());

  if (!aTrackEncoder) {
    NS_ERROR("No track encoder to get metadata from");
    return NS_ERROR_FAILURE;
  }

  RefPtr<TrackMetadataBase> metadata = aTrackEncoder->GetMetadata();
  if (!metadata) {
    LOG(LogLevel::Error, ("metadata == null"));
    SetError();
    return NS_ERROR_ABORT;
  }

  const nsresult rv = mWriter->SetMetadata(metadata);
  if (NS_FAILED(rv)) {
    LOG(LogLevel::Error, ("SetMetadata failed"));
    SetError();
  }
  return rv;
}
bool MediaEncoder::IsShutdown() {
MOZ_ASSERT(mEncoderThread->IsCurrentThreadIn());
return mShutdown;
@ -873,8 +970,6 @@ bool MediaEncoder::IsShutdown() {
void MediaEncoder::Cancel() {
MOZ_ASSERT(NS_IsMainThread());
Stop();
RefPtr<MediaEncoder> self = this;
nsresult rv = mEncoderThread->Dispatch(NewRunnableFrom([self]() mutable {
self->mCanceled = true;
@ -945,11 +1040,6 @@ bool MediaEncoder::IsWebMEncoderEnabled() {
}
#endif
const nsString& MediaEncoder::MimeType() const {
MOZ_ASSERT(mEncoderThread->IsCurrentThreadIn());
return mMIMEType;
}
void MediaEncoder::NotifyInitialized() {
MOZ_ASSERT(mEncoderThread->IsCurrentThreadIn());
@ -1030,5 +1120,3 @@ void MediaEncoder::SetVideoKeyFrameInterval(int32_t aVideoKeyFrameInterval) {
}
} // namespace mozilla
#undef LOG

View File

@ -8,7 +8,6 @@
#include "ContainerWriter.h"
#include "CubebUtils.h"
#include "MediaQueue.h"
#include "MediaStreamGraph.h"
#include "MediaStreamListener.h"
#include "mozilla/DebugOnly.h"
@ -20,7 +19,6 @@
namespace mozilla {
class DriftCompensator;
class Muxer;
class Runnable;
class TaskQueue;
@ -78,21 +76,29 @@ class MediaEncoderListener {
* been initialized and when there's data available.
* => encoder->RegisterListener(listener);
*
* 3) When the MediaEncoderListener is notified that the MediaEncoder has
* data available, we can encode data. This also encodes metadata on its
* first invocation.
* 3) Connect the MediaStreamTracks to be recorded.
* => encoder->ConnectMediaStreamTrack(track);
* This creates the corresponding TrackEncoder and connects the track and
* the TrackEncoder through a track listener. This also starts encoding.
*
* 4) When the MediaEncoderListener is notified that the MediaEncoder is
* initialized, we can encode metadata.
* => encoder->GetEncodedMetadata(...);
*
* 5) When the MediaEncoderListener is notified that the MediaEncoder has
* data available, we can encode data.
* => encoder->GetEncodedData(...);
*
* 4) To stop encoding, there are multiple options:
* 6) To stop encoding, there are multiple options:
*
* 4.1) Stop() for a graceful stop.
* 6.1) Stop() for a graceful stop.
* => encoder->Stop();
*
* 4.2) Cancel() for an immediate stop, if you don't need the data currently
* 6.2) Cancel() for an immediate stop, if you don't need the data currently
* buffered.
* => encoder->Cancel();
*
* 4.3) When all input tracks end, the MediaEncoder will automatically stop
* 6.3) When all input tracks end, the MediaEncoder will automatically stop
* and shut down.
*/
class MediaEncoder {
@ -151,12 +157,24 @@ class MediaEncoder {
uint32_t aAudioBitrate, uint32_t aVideoBitrate, uint8_t aTrackTypes,
TrackRate aTrackRate);
/**
* Encodes raw metadata for all tracks to aOutputBufs. aMIMEType is the valid
* mime-type for the returned container data. The buffer of container data is
* allocated in ContainerWriter::GetContainerData().
*
* Should there be insufficient input data for either track encoder to infer
* the metadata, or if metadata has already been encoded, we return an error
* and the output arguments are undefined. Otherwise we return NS_OK.
*/
nsresult GetEncodedMetadata(nsTArray<nsTArray<uint8_t>>* aOutputBufs,
nsAString& aMIMEType);
/**
* Encodes raw data for all tracks to aOutputBufs. The buffer of container
* data is allocated in ContainerWriter::GetContainerData().
*
* On its first call, metadata is also encoded. TrackEncoders must have been
* initialized before this is called.
* This implies that metadata has already been encoded and that all track
* encoders are still active. Should either implication break, we return an
* error and the output argument is undefined. Otherwise we return NS_OK.
*/
nsresult GetEncodedData(nsTArray<nsTArray<uint8_t>>* aOutputBufs);
@ -178,8 +196,6 @@ class MediaEncoder {
static bool IsWebMEncoderEnabled();
#endif
const nsString& MimeType() const;
/**
* Notifies listeners that this MediaEncoder has been initialized.
*/
@ -237,10 +253,15 @@ class MediaEncoder {
*/
void SetError();
// Get encoded data from trackEncoder and write to muxer
nsresult WriteEncodedDataToMuxer(TrackEncoder* aTrackEncoder);
// Get metadata from trackEncoder and copy to muxer
nsresult CopyMetadataToMuxer(TrackEncoder* aTrackEncoder);
const RefPtr<TaskQueue> mEncoderThread;
const RefPtr<DriftCompensator> mDriftCompensator;
UniquePtr<Muxer> mMuxer;
UniquePtr<ContainerWriter> mWriter;
RefPtr<AudioTrackEncoder> mAudioEncoder;
RefPtr<AudioTrackListener> mAudioListener;
RefPtr<VideoTrackEncoder> mVideoEncoder;
@ -263,10 +284,10 @@ class MediaEncoder {
// A video track that we are encoding. Will be null if the input stream
// doesn't contain video on start() or if the input is an AudioNode.
RefPtr<dom::VideoStreamTrack> mVideoTrack;
TimeStamp mStartTime;
const nsString mMIMEType;
nsString mMIMEType;
bool mInitialized;
bool mMetadataEncoded;
bool mCompleted;
bool mError;
bool mCanceled;

View File

@ -1,228 +0,0 @@
/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*-*/
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this file,
* You can obtain one at http://mozilla.org/MPL/2.0/. */
#include "Muxer.h"
#include "ContainerWriter.h"
namespace mozilla {
LazyLogModule gMuxerLog("Muxer");
#define LOG(type, ...) MOZ_LOG(gMuxerLog, type, (__VA_ARGS__))
// Takes ownership of the ContainerWriter that performs the actual muxing.
Muxer::Muxer(UniquePtr<ContainerWriter> aWriter)
    : mWriter(std::move(aWriter)) {}
// True once the underlying writer has finished writing all container data.
bool Muxer::IsFinished() { return mWriter->IsWritingComplete(); }
// Forwards all track metadata to the writer, then derives muxer state from
// it: which track kinds (audio/video) are present and, for Opus, the codec
// delay. On writer failure the error is returned and no state is changed.
// On success mMetadataSet is flagged and absent tracks' frame queues are
// finished immediately so muxing never waits on them.
nsresult Muxer::SetMetadata(
    const nsTArray<RefPtr<TrackMetadataBase>>& aMetadata) {
  nsresult rv = mWriter->SetMetadata(aMetadata);
  if (NS_FAILED(rv)) {
    LOG(LogLevel::Error, "%p Setting metadata failed, tracks=%zu", this,
        aMetadata.Length());
    return rv;
  }

  for (const auto& track : aMetadata) {
    switch (track->GetKind()) {
      case TrackMetadataBase::METADATA_OPUS: {
        // In the case of Opus we need to calculate the codec delay based on the
        // pre-skip. For more information see:
        // https://tools.ietf.org/html/rfc7845#section-4.2
        // Calculate offset in microseconds
        OpusMetadata* opusMeta = static_cast<OpusMetadata*>(track.get());
        mAudioCodecDelay = static_cast<uint64_t>(
            LittleEndian::readUint16(opusMeta->mIdHeader.Elements() + 10) *
            PR_USEC_PER_SEC / 48000);
        // Opus is audio too: fall through to flag the audio track as present.
        MOZ_FALLTHROUGH;
      }
      case TrackMetadataBase::METADATA_VORBIS:
      case TrackMetadataBase::METADATA_AAC:
      case TrackMetadataBase::METADATA_AMR:
      case TrackMetadataBase::METADATA_EVRC:
        MOZ_ASSERT(!mHasAudio, "Only one audio track supported");
        mHasAudio = true;
        break;
      case TrackMetadataBase::METADATA_VP8:
        MOZ_ASSERT(!mHasVideo, "Only one video track supported");
        mHasVideo = true;
        break;
      default:
        MOZ_CRASH("Unknown codec metadata");
    };
  }
  mMetadataSet = true;
  MOZ_ASSERT(mHasAudio || mHasVideo);
  // A track that is not present will never produce frames; finish its queue
  // up front so consumers see end-of-stream right away.
  if (!mHasAudio) {
    mEncodedAudioFrames.Finish();
    MOZ_ASSERT(mEncodedAudioFrames.AtEndOfStream());
  }
  if (!mHasVideo) {
    mEncodedVideoFrames.Finish();
    MOZ_ASSERT(mEncodedVideoFrames.AtEndOfStream());
  }
  LOG(LogLevel::Info, "%p Metadata set; audio=%d, video=%d", this, mHasAudio,
      mHasVideo);
  return rv;
}
void Muxer::AddEncodedAudioFrame(EncodedFrame* aFrame) {
MOZ_ASSERT(mMetadataSet);
MOZ_ASSERT(mHasAudio);
if (aFrame->mFrameType == EncodedFrame::FrameType::OPUS_AUDIO_FRAME) {
aFrame->mTime += mAudioCodecDelay;
}
mEncodedAudioFrames.Push(aFrame);
LOG(LogLevel::Verbose,
"%p Added audio frame of type %u, [start %" PRIu64 ", end %" PRIu64 ")",
this, aFrame->mFrameType, aFrame->mTime,
aFrame->mTime + aFrame->mDuration);
}
void Muxer::AddEncodedVideoFrame(EncodedFrame* aFrame) {
MOZ_ASSERT(mMetadataSet);
MOZ_ASSERT(mHasVideo);
mEncodedVideoFrames.Push(aFrame);
LOG(LogLevel::Verbose,
"%p Added video frame of type %u, [start %" PRIu64 ", end %" PRIu64 ")",
this, aFrame->mFrameType, aFrame->mTime,
aFrame->mTime + aFrame->mDuration);
}
void Muxer::AudioEndOfStream() {
MOZ_ASSERT(mMetadataSet);
MOZ_ASSERT(mHasAudio);
LOG(LogLevel::Info, "%p Reached audio EOS", this);
mEncodedAudioFrames.Finish();
}
void Muxer::VideoEndOfStream() {
MOZ_ASSERT(mMetadataSet);
MOZ_ASSERT(mHasVideo);
LOG(LogLevel::Info, "%p Reached video EOS", this);
mEncodedVideoFrames.Finish();
}
nsresult Muxer::GetData(nsTArray<nsTArray<uint8_t>>* aOutputBuffers) {
MOZ_ASSERT(mMetadataSet);
MOZ_ASSERT(mHasAudio || mHasVideo);
nsresult rv;
if (!mMetadataEncoded) {
rv = mWriter->GetContainerData(aOutputBuffers, ContainerWriter::GET_HEADER);
if (NS_FAILED(rv)) {
LOG(LogLevel::Error, "%p Failed getting metadata from writer", this);
return rv;
}
mMetadataEncoded = true;
}
if (mEncodedAudioFrames.GetSize() == 0 && !mEncodedAudioFrames.IsFinished() &&
mEncodedVideoFrames.GetSize() == 0 && !mEncodedVideoFrames.IsFinished()) {
// Nothing to mux.
return NS_OK;
}
rv = Mux();
if (NS_FAILED(rv)) {
LOG(LogLevel::Error, "%p Failed muxing data into writer", this);
return rv;
}
MOZ_ASSERT_IF(
mEncodedAudioFrames.IsFinished() && mEncodedVideoFrames.IsFinished(),
mEncodedAudioFrames.AtEndOfStream());
MOZ_ASSERT_IF(
mEncodedAudioFrames.IsFinished() && mEncodedVideoFrames.IsFinished(),
mEncodedVideoFrames.AtEndOfStream());
uint32_t flags =
mEncodedAudioFrames.AtEndOfStream() && mEncodedVideoFrames.AtEndOfStream()
? ContainerWriter::FLUSH_NEEDED
: 0;
if (mEncodedAudioFrames.AtEndOfStream() &&
mEncodedVideoFrames.AtEndOfStream()) {
LOG(LogLevel::Info, "%p All data written", this);
}
return mWriter->GetContainerData(aOutputBuffers, flags);
}
nsresult Muxer::Mux() {
MOZ_ASSERT(mMetadataSet);
MOZ_ASSERT(mHasAudio || mHasVideo);
nsTArray<RefPtr<EncodedFrame>> frames;
// The times at which we expect our next video and audio frames. These are
// based on the time + duration (GetEndTime()) of the last seen frames.
// Assumes that the encoders write the correct duration for frames.;
uint64_t expectedNextVideoTime = 0;
uint64_t expectedNextAudioTime = 0;
// Interleave frames until we're out of audio or video
while (mEncodedVideoFrames.GetSize() > 0 &&
mEncodedAudioFrames.GetSize() > 0) {
RefPtr<EncodedFrame> videoFrame = mEncodedVideoFrames.PeekFront();
RefPtr<EncodedFrame> audioFrame = mEncodedAudioFrames.PeekFront();
// For any expected time our frames should occur at or after that time.
MOZ_ASSERT(videoFrame->mTime >= expectedNextVideoTime);
MOZ_ASSERT(audioFrame->mTime >= expectedNextAudioTime);
if (videoFrame->mTime <= audioFrame->mTime) {
expectedNextVideoTime = videoFrame->GetEndTime();
RefPtr<EncodedFrame> frame = mEncodedVideoFrames.PopFront();
frames.AppendElement(frame);
} else {
expectedNextAudioTime = audioFrame->GetEndTime();
RefPtr<EncodedFrame> frame = mEncodedAudioFrames.PopFront();
frames.AppendElement(frame);
}
}
// If we're out of audio we still may be able to add more video...
if (mEncodedAudioFrames.GetSize() == 0) {
while (mEncodedVideoFrames.GetSize() > 0) {
if (!mEncodedAudioFrames.AtEndOfStream() &&
mEncodedVideoFrames.PeekFront()->mTime > expectedNextAudioTime) {
// Audio encoding is not complete and since the video frame comes
// after our next audio frame we cannot safely add it.
break;
}
frames.AppendElement(mEncodedVideoFrames.PopFront());
}
}
// If we're out of video we still may be able to add more audio...
if (mEncodedVideoFrames.GetSize() == 0) {
while (mEncodedAudioFrames.GetSize() > 0) {
if (!mEncodedVideoFrames.AtEndOfStream() &&
mEncodedAudioFrames.PeekFront()->mTime > expectedNextVideoTime) {
// Video encoding is not complete and since the audio frame comes
// after our next video frame we cannot safely add it.
break;
}
frames.AppendElement(mEncodedAudioFrames.PopFront());
}
}
LOG(LogLevel::Debug,
"%p Muxed data, remaining-audio=%zu, remaining-video=%zu", this,
mEncodedAudioFrames.GetSize(), mEncodedVideoFrames.GetSize());
// If encoding is complete for both encoders we should signal end of stream,
// otherwise we keep going.
uint32_t flags =
mEncodedVideoFrames.AtEndOfStream() && mEncodedAudioFrames.AtEndOfStream()
? ContainerWriter::END_OF_STREAM
: 0;
nsresult rv = mWriter->WriteEncodedTrack(frames, flags);
if (NS_FAILED(rv)) {
LOG(LogLevel::Error, "Error! Failed to write muxed data to the container");
}
return rv;
}
} // namespace mozilla
#undef LOG

View File

@ -1,74 +0,0 @@
/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*-*/
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this file,
* You can obtain one at http://mozilla.org/MPL/2.0/. */
#ifndef DOM_MEDIA_ENCODER_MUXER_H_
#define DOM_MEDIA_ENCODER_MUXER_H_
#include "MediaQueue.h"
namespace mozilla {
class ContainerWriter;
// Generic Muxer class that helps pace the output from track encoders to the
// ContainerWriter, so time never appears to go backwards.
// Note that the entire class is written for single threaded access.
class Muxer {
public:
explicit Muxer(UniquePtr<ContainerWriter> aWriter);
~Muxer() = default;
// Returns true when all tracks have ended, and all data has been muxed and
// fetched.
bool IsFinished();
// Returns true if this muxer has not been given metadata yet.
bool NeedsMetadata() const { return !mMetadataSet; }
// Sets metadata for all tracks. This may only be called once.
nsresult SetMetadata(const nsTArray<RefPtr<TrackMetadataBase>>& aMetadata);
// Adds an encoded audio frame for muxing
void AddEncodedAudioFrame(EncodedFrame* aFrame);
// Adds an encoded video frame for muxing
void AddEncodedVideoFrame(EncodedFrame* aFrame);
// Marks the audio track as ended. Once all tracks for which we have metadata
// have ended, GetData() will drain and the muxer will be marked as finished.
void AudioEndOfStream();
// Marks the video track as ended. Once all tracks for which we have metadata
// have ended, GetData() will drain and the muxer will be marked as finished.
void VideoEndOfStream();
// Gets the data that has been muxed and written into the container so far.
nsresult GetData(nsTArray<nsTArray<uint8_t>>* aOutputBuffers);
private:
// Writes data in MediaQueues to the ContainerWriter.
nsresult Mux();
// Audio frames that have been encoded and are pending write to the muxer.
MediaQueue<EncodedFrame> mEncodedAudioFrames;
// Video frames that have been encoded and are pending write to the muxer.
MediaQueue<EncodedFrame> mEncodedVideoFrames;
// The writer for the specific container we're recording into.
UniquePtr<ContainerWriter> mWriter;
// How much each audio time stamp should be delayed in microseconds. Used to
// adjust for opus codec delay.
uint64_t mAudioCodecDelay = 0;
// True once metadata has been set in the muxer.
bool mMetadataSet = false;
// True once metadata has been written to file.
bool mMetadataEncoded = false;
// True if metadata is set and contains an audio track.
bool mHasAudio = false;
// True if metadata is set and contains a video track.
bool mHasVideo = false;
};
} // namespace mozilla
#endif

View File

@ -10,6 +10,7 @@
#include <opus/opus.h>
#undef LOG
#define LOG(args, ...)
namespace mozilla {
@ -227,8 +228,7 @@ already_AddRefed<TrackMetadataBase> OpusTrackEncoder::GetMetadata() {
return meta.forget();
}
nsresult OpusTrackEncoder::GetEncodedTrack(
nsTArray<RefPtr<EncodedFrame>>& aData) {
nsresult OpusTrackEncoder::GetEncodedTrack(EncodedFrameContainer& aData) {
AUTO_PROFILER_LABEL("OpusTrackEncoder::GetEncodedTrack", OTHER);
MOZ_ASSERT(mInitialized || mCanceled);
@ -325,7 +325,7 @@ nsresult OpusTrackEncoder::GetEncodedTrack(
MOZ_ASSERT(frameCopied <= 3844, "frameCopied exceeded expected range");
RefPtr<EncodedFrame> audiodata = new EncodedFrame();
audiodata->mFrameType = EncodedFrame::OPUS_AUDIO_FRAME;
audiodata->SetFrameType(EncodedFrame::OPUS_AUDIO_FRAME);
int framesInPCM = frameCopied;
if (mResampler) {
AutoTArray<AudioDataValue, 9600> resamplingDest;
@ -367,10 +367,10 @@ nsresult OpusTrackEncoder::GetEncodedTrack(
mResampledLeftover.Length());
// This is always at 48000Hz.
framesInPCM = framesLeft + outframesToCopy;
audiodata->mDuration = framesInPCM;
audiodata->SetDuration(framesInPCM);
} else {
// The ogg time stamping and pre-skip is always timed at 48000.
audiodata->mDuration = frameCopied * (kOpusSamplingRate / mSamplingRate);
audiodata->SetDuration(frameCopied * (kOpusSamplingRate / mSamplingRate));
}
// Remove the raw data which has been pulled to pcm buffer.
@ -422,16 +422,14 @@ nsresult OpusTrackEncoder::GetEncodedTrack(
audiodata->SwapInFrameData(frameData);
// timestamp should be the time of the first sample
audiodata->mTime = mOutputTimeStamp;
audiodata->SetTimeStamp(mOutputTimeStamp);
mOutputTimeStamp +=
FramesToUsecs(GetPacketDuration(), kOpusSamplingRate).value();
LOG("[Opus] mOutputTimeStamp %lld.", mOutputTimeStamp);
aData.AppendElement(audiodata);
aData.AppendEncodedFrame(audiodata);
}
return result >= 0 ? NS_OK : NS_ERROR_FAILURE;
}
} // namespace mozilla
#undef LOG

View File

@ -33,7 +33,7 @@ class OpusTrackEncoder : public AudioTrackEncoder {
already_AddRefed<TrackMetadataBase> GetMetadata() override;
nsresult GetEncodedTrack(nsTArray<RefPtr<EncodedFrame>>& aData) override;
nsresult GetEncodedTrack(EncodedFrameContainer& aData) override;
protected:
int GetPacketDuration() override;

View File

@ -763,5 +763,3 @@ void VideoTrackEncoder::SetKeyFrameInterval(int32_t aKeyFrameInterval) {
}
} // namespace mozilla
#undef TRACK_LOG

View File

@ -7,7 +7,7 @@
#define TrackEncoder_h_
#include "AudioSegment.h"
#include "EncodedFrame.h"
#include "EncodedFrameContainer.h"
#include "MediaStreamGraph.h"
#include "StreamTracks.h"
#include "TrackMetadataBase.h"
@ -82,7 +82,7 @@ class TrackEncoder {
* Encodes raw segments. Result data is returned in aData, and called on the
* worker thread.
*/
virtual nsresult GetEncodedTrack(nsTArray<RefPtr<EncodedFrame>>& aData) = 0;
virtual nsresult GetEncodedTrack(EncodedFrameContainer& aData) = 0;
/**
* Returns true once this TrackEncoder is initialized.

View File

@ -220,8 +220,7 @@ already_AddRefed<TrackMetadataBase> VP8TrackEncoder::GetMetadata() {
return meta.forget();
}
nsresult VP8TrackEncoder::GetEncodedPartitions(
nsTArray<RefPtr<EncodedFrame>>& aData) {
nsresult VP8TrackEncoder::GetEncodedPartitions(EncodedFrameContainer& aData) {
vpx_codec_iter_t iter = nullptr;
EncodedFrame::FrameType frameType = EncodedFrame::VP8_P_FRAME;
nsTArray<uint8_t> frameData;
@ -250,7 +249,7 @@ nsresult VP8TrackEncoder::GetEncodedPartitions(
if (!frameData.IsEmpty()) {
// Copy the encoded data to aData.
EncodedFrame* videoData = new EncodedFrame();
videoData->mFrameType = frameType;
videoData->SetFrameType(frameType);
// Convert the timestamp and duration to Usecs.
CheckedInt64 timestamp = FramesToUsecs(pkt->data.frame.pts, mTrackRate);
@ -258,7 +257,7 @@ nsresult VP8TrackEncoder::GetEncodedPartitions(
NS_ERROR("Microsecond timestamp overflow");
return NS_ERROR_DOM_MEDIA_OVERFLOW_ERR;
}
videoData->mTime = (uint64_t)timestamp.value();
videoData->SetTimeStamp((uint64_t)timestamp.value());
mExtractedDuration += pkt->data.frame.duration;
if (!mExtractedDuration.isValid()) {
@ -280,13 +279,14 @@ nsresult VP8TrackEncoder::GetEncodedPartitions(
}
mExtractedDurationUs = totalDuration;
videoData->mDuration = (uint64_t)duration.value();
videoData->SetDuration((uint64_t)duration.value());
videoData->SwapInFrameData(frameData);
VP8LOG(LogLevel::Verbose,
"GetEncodedPartitions TimeStamp %" PRIu64 ", Duration %" PRIu64
", FrameType %d",
videoData->mTime, videoData->mDuration, videoData->mFrameType);
aData.AppendElement(videoData);
videoData->GetTimeStamp(), videoData->GetDuration(),
videoData->GetFrameType());
aData.AppendEncodedFrame(videoData);
}
return pkt ? NS_OK : NS_ERROR_NOT_AVAILABLE;
@ -441,8 +441,7 @@ VP8TrackEncoder::EncodeOperation VP8TrackEncoder::GetNextEncodeOperation(
* encode it.
* 4. Remove the encoded chunks in mSourceSegment after for-loop.
*/
nsresult VP8TrackEncoder::GetEncodedTrack(
nsTArray<RefPtr<EncodedFrame>>& aData) {
nsresult VP8TrackEncoder::GetEncodedTrack(EncodedFrameContainer& aData) {
AUTO_PROFILER_LABEL("VP8TrackEncoder::GetEncodedTrack", OTHER);
MOZ_ASSERT(mInitialized || mCanceled);
@ -510,7 +509,7 @@ nsresult VP8TrackEncoder::GetEncodedTrack(
// because this frame will be skipped.
VP8LOG(LogLevel::Warning,
"MediaRecorder lagging behind. Skipping a frame.");
RefPtr<EncodedFrame> last = aData.LastElement();
RefPtr<EncodedFrame> last = aData.GetEncodedFrames().LastElement();
if (last) {
mExtractedDuration += chunk.mDuration;
if (!mExtractedDuration.isValid()) {
@ -526,7 +525,8 @@ nsresult VP8TrackEncoder::GetEncodedTrack(
NS_ERROR("skipped duration overflow");
return NS_ERROR_DOM_MEDIA_OVERFLOW_ERR;
}
last->mDuration += static_cast<uint64_t>(skippedDuration.value());
last->SetDuration(last->GetDuration() +
(static_cast<uint64_t>(skippedDuration.value())));
}
}
@ -570,5 +570,3 @@ nsresult VP8TrackEncoder::GetEncodedTrack(
}
} // namespace mozilla
#undef VP8LOG

View File

@ -34,7 +34,7 @@ class VP8TrackEncoder : public VideoTrackEncoder {
already_AddRefed<TrackMetadataBase> GetMetadata() final;
nsresult GetEncodedTrack(nsTArray<RefPtr<EncodedFrame>>& aData) final;
nsresult GetEncodedTrack(EncodedFrameContainer& aData) final;
protected:
nsresult Init(int32_t aWidth, int32_t aHeight, int32_t aDisplayWidth,
@ -50,7 +50,7 @@ class VP8TrackEncoder : public VideoTrackEncoder {
// null for EOS detection.
// NS_OK if some data was appended to aData.
// An error nsresult otherwise.
nsresult GetEncodedPartitions(nsTArray<RefPtr<EncodedFrame>>& aData);
nsresult GetEncodedPartitions(EncodedFrameContainer& aData);
// Prepare the input data to the mVPXImageWrapper for encoding.
nsresult PrepareRawFrame(VideoChunk& aChunk);

View File

@ -9,7 +9,7 @@ with Files('*'):
EXPORTS += [
'ContainerWriter.h',
'EncodedFrame.h',
'EncodedFrameContainer.h',
'MediaEncoder.h',
'OpusTrackEncoder.h',
'TrackEncoder.h',
@ -18,7 +18,6 @@ EXPORTS += [
UNIFIED_SOURCES += [
'MediaEncoder.cpp',
'Muxer.cpp',
'OpusTrackEncoder.cpp',
'TrackEncoder.cpp',
]

View File

@ -1,27 +0,0 @@
/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
/* vim: set ts=8 sts=2 et sw=2 tw=80: */
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
#include "AudioGenerator.h"
#include "AudioSegment.h"
using namespace mozilla;
AudioGenerator::AudioGenerator(int32_t aChannels, int32_t aSampleRate)
: mGenerator(aSampleRate, 1000), mChannels(aChannels) {}
void AudioGenerator::Generate(AudioSegment& aSegment, const int32_t& aSamples) {
RefPtr<SharedBuffer> buffer =
SharedBuffer::Create(aSamples * sizeof(int16_t));
int16_t* dest = static_cast<int16_t*>(buffer->Data());
mGenerator.generate(dest, aSamples);
AutoTArray<const int16_t*, 1> channels;
for (int32_t i = 0; i < mChannels; i++) {
channels.AppendElement(dest);
}
aSegment.AppendFrames(buffer.forget(), channels, aSamples,
PRINCIPAL_HANDLE_NONE);
}

View File

@ -1,27 +0,0 @@
/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
/* vim: set ts=8 sts=2 et sw=2 tw=80: */
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
#ifndef DOM_MEDIA_GTEST_AUDIO_GENERATOR_H_
#define DOM_MEDIA_GTEST_AUDIO_GENERATOR_H_
#include "prtime.h"
#include "SineWaveGenerator.h"
namespace mozilla {
class AudioSegment;
}
class AudioGenerator {
public:
AudioGenerator(int32_t aChannels, int32_t aSampleRate);
void Generate(mozilla::AudioSegment& aSegment, const int32_t& aSamples);
private:
mozilla::SineWaveGenerator mGenerator;
const int32_t mChannels;
};
#endif // DOM_MEDIA_GTEST_AUDIO_GENERATOR_H_

View File

@ -5,11 +5,33 @@
#include "gtest/gtest.h"
#include "OpusTrackEncoder.h"
#include "AudioGenerator.h"
#include "SineWaveGenerator.h"
using namespace mozilla;
class AudioGenerator {
public:
AudioGenerator(int32_t aChannels, int32_t aSampleRate)
: mGenerator(aSampleRate, 1000), mChannels(aChannels) {}
void Generate(AudioSegment& aSegment, const int32_t& aSamples) {
RefPtr<SharedBuffer> buffer =
SharedBuffer::Create(aSamples * sizeof(int16_t));
int16_t* dest = static_cast<int16_t*>(buffer->Data());
mGenerator.generate(dest, aSamples);
AutoTArray<const int16_t*, 1> channels;
for (int32_t i = 0; i < mChannels; i++) {
channels.AppendElement(dest);
}
aSegment.AppendFrames(buffer.forget(), channels, aSamples,
PRINCIPAL_HANDLE_NONE);
}
private:
SineWaveGenerator mGenerator;
const int32_t mChannels;
};
class TestOpusTrackEncoder : public OpusTrackEncoder {
public:
TestOpusTrackEncoder() : OpusTrackEncoder(90000) {}
@ -201,13 +223,13 @@ TEST(OpusAudioTrackEncoder, FrameEncode)
encoder.AppendAudioSegment(std::move(segment));
nsTArray<RefPtr<EncodedFrame>> frames;
EXPECT_TRUE(NS_SUCCEEDED(encoder.GetEncodedTrack(frames)));
EncodedFrameContainer container;
EXPECT_TRUE(NS_SUCCEEDED(encoder.GetEncodedTrack(container)));
// Verify that encoded data is 5 seconds long.
uint64_t totalDuration = 0;
for (auto& frame : frames) {
totalDuration += frame->mDuration;
for (auto& frame : container.GetEncodedFrames()) {
totalDuration += frame->GetDuration();
}
// 44100 as used above gets resampled to 48000 for opus.
const uint64_t five = 48000 * 5;

View File

@ -1,213 +0,0 @@
/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*-*/
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this file,
* You can obtain one at http://mozilla.org/MPL/2.0/. */
#include <vector>
#include "ContainerWriter.h"
#include "EncodedFrame.h"
#include "gtest/gtest.h"
#include "gmock/gmock.h"
#include "Muxer.h"
#include "OpusTrackEncoder.h"
#include "WebMWriter.h"
using namespace mozilla;
using testing::_;
using testing::ElementsAre;
using testing::Return;
using testing::StaticAssertTypeEq;
static RefPtr<TrackMetadataBase> CreateOpusMetadata(int32_t aChannels,
float aSamplingFrequency,
size_t aIdHeaderSize,
size_t aCommentHeaderSize) {
auto opusMetadata = MakeRefPtr<OpusMetadata>();
opusMetadata->mChannels = aChannels;
opusMetadata->mSamplingFrequency = aSamplingFrequency;
opusMetadata->mIdHeader.SetLength(aIdHeaderSize);
for (size_t i = 0; i < opusMetadata->mIdHeader.Length(); i++) {
opusMetadata->mIdHeader[i] = 0;
}
opusMetadata->mCommentHeader.SetLength(aCommentHeaderSize);
for (size_t i = 0; i < opusMetadata->mCommentHeader.Length(); i++) {
opusMetadata->mCommentHeader[i] = 0;
}
return opusMetadata;
}
static RefPtr<TrackMetadataBase> CreateVP8Metadata(int32_t aWidth,
int32_t aHeight) {
auto vp8Metadata = MakeRefPtr<VP8Metadata>();
vp8Metadata->mWidth = aWidth;
vp8Metadata->mDisplayWidth = aWidth;
vp8Metadata->mHeight = aHeight;
vp8Metadata->mDisplayHeight = aHeight;
return vp8Metadata;
}
static RefPtr<EncodedFrame> CreateFrame(EncodedFrame::FrameType aType,
uint64_t aTimeUs, uint64_t aDurationUs,
size_t aDataSize) {
auto frame = MakeRefPtr<EncodedFrame>();
frame->mTime = aTimeUs;
if (aType == EncodedFrame::OPUS_AUDIO_FRAME) {
// Opus duration is in samples, so figure out how many samples will put us
// closest to aDurationUs without going over.
frame->mDuration = UsecsToFrames(aDurationUs, 48000).value();
} else {
frame->mDuration = aDurationUs;
}
frame->mFrameType = aType;
nsTArray<uint8_t> data;
data.SetLength(aDataSize);
frame->SwapInFrameData(data);
return frame;
}
namespace testing {
namespace internal {
// This makes the googletest framework treat nsTArray as an std::vector, so all
// the regular Matchers (like ElementsAre) work for it.
template <typename Element>
class StlContainerView<nsTArray<Element>> {
public:
typedef GTEST_REMOVE_CONST_(Element) RawElement;
typedef std::vector<RawElement> type;
typedef const type const_reference;
static const_reference ConstReference(const nsTArray<Element>& aContainer) {
StaticAssertTypeEq<Element, RawElement>();
return type(aContainer.begin(), aContainer.end());
}
static type Copy(const nsTArray<Element>& aContainer) {
return type(aContainer.begin(), aContainer.end());
}
};
} // namespace internal
} // namespace testing
class MockContainerWriter : public ContainerWriter {
public:
MOCK_METHOD2(WriteEncodedTrack,
nsresult(const nsTArray<RefPtr<EncodedFrame>>&, uint32_t));
MOCK_METHOD1(SetMetadata,
nsresult(const nsTArray<RefPtr<TrackMetadataBase>>&));
MOCK_METHOD0(IsWritingComplete, bool());
MOCK_METHOD2(GetContainerData,
nsresult(nsTArray<nsTArray<uint8_t>>*, uint32_t));
};
TEST(MuxerTest, AudioOnly)
{
MockContainerWriter* writer = new MockContainerWriter();
Muxer muxer(WrapUnique<ContainerWriter>(writer));
// Prepare data
auto opusMeta = CreateOpusMetadata(1, 48000, 16, 16);
auto audioFrame = CreateFrame(EncodedFrame::OPUS_AUDIO_FRAME, 0, 48000, 4096);
// Expectations
EXPECT_CALL(*writer, SetMetadata(ElementsAre(opusMeta)))
.WillOnce(Return(NS_OK));
EXPECT_CALL(*writer, WriteEncodedTrack(ElementsAre(audioFrame),
ContainerWriter::END_OF_STREAM))
.WillOnce(Return(NS_OK));
EXPECT_CALL(*writer, GetContainerData(_, ContainerWriter::GET_HEADER))
.WillOnce(Return(NS_OK));
EXPECT_CALL(*writer, GetContainerData(_, ContainerWriter::FLUSH_NEEDED))
.WillOnce(Return(NS_OK));
EXPECT_CALL(*writer, IsWritingComplete()).Times(0);
// Test
EXPECT_EQ(muxer.SetMetadata(nsTArray<RefPtr<TrackMetadataBase>>({opusMeta})),
NS_OK);
muxer.AddEncodedAudioFrame(audioFrame);
muxer.AudioEndOfStream();
nsTArray<nsTArray<uint8_t>> buffers;
EXPECT_EQ(muxer.GetData(&buffers), NS_OK);
}
TEST(MuxerTest, AudioVideo)
{
MockContainerWriter* writer = new MockContainerWriter();
Muxer muxer(WrapUnique<ContainerWriter>(writer));
// Prepare data
auto opusMeta = CreateOpusMetadata(1, 48000, 16, 16);
auto vp8Meta = CreateVP8Metadata(640, 480);
auto audioFrame = CreateFrame(EncodedFrame::OPUS_AUDIO_FRAME, 0, 48000, 4096);
auto videoFrame = CreateFrame(EncodedFrame::VP8_I_FRAME, 0, 50000, 65536);
// Expectations
EXPECT_CALL(*writer, SetMetadata(ElementsAre(opusMeta, vp8Meta)))
.WillOnce(Return(NS_OK));
EXPECT_CALL(*writer, WriteEncodedTrack(ElementsAre(videoFrame, audioFrame),
ContainerWriter::END_OF_STREAM))
.WillOnce(Return(NS_OK));
EXPECT_CALL(*writer, GetContainerData(_, ContainerWriter::GET_HEADER))
.WillOnce(Return(NS_OK));
EXPECT_CALL(*writer, GetContainerData(_, ContainerWriter::FLUSH_NEEDED))
.WillOnce(Return(NS_OK));
EXPECT_CALL(*writer, IsWritingComplete()).Times(0);
// Test
EXPECT_EQ(muxer.SetMetadata(
nsTArray<RefPtr<TrackMetadataBase>>({opusMeta, vp8Meta})),
NS_OK);
muxer.AddEncodedAudioFrame(audioFrame);
muxer.AudioEndOfStream();
muxer.AddEncodedVideoFrame(videoFrame);
muxer.VideoEndOfStream();
nsTArray<nsTArray<uint8_t>> buffers;
EXPECT_EQ(muxer.GetData(&buffers), NS_OK);
}
TEST(MuxerTest, AudioVideoOutOfOrder)
{
MockContainerWriter* writer = new MockContainerWriter();
Muxer muxer(WrapUnique<ContainerWriter>(writer));
// Prepare data
auto opusMeta = CreateOpusMetadata(1, 48000, 16, 16);
auto vp8Meta = CreateVP8Metadata(640, 480);
auto a0 = CreateFrame(EncodedFrame::OPUS_AUDIO_FRAME, 0, 48, 4096);
auto v0 = CreateFrame(EncodedFrame::VP8_I_FRAME, 0, 50, 65536);
auto a48 = CreateFrame(EncodedFrame::OPUS_AUDIO_FRAME, 48, 48, 4096);
auto v50 = CreateFrame(EncodedFrame::VP8_I_FRAME, 50, 50, 65536);
// Expectations
EXPECT_CALL(*writer, SetMetadata(ElementsAre(opusMeta, vp8Meta)))
.WillOnce(Return(NS_OK));
EXPECT_CALL(*writer, WriteEncodedTrack(ElementsAre(v0, a0, a48, v50),
ContainerWriter::END_OF_STREAM))
.WillOnce(Return(NS_OK));
EXPECT_CALL(*writer, GetContainerData(_, ContainerWriter::GET_HEADER))
.WillOnce(Return(NS_OK));
EXPECT_CALL(*writer, GetContainerData(_, ContainerWriter::FLUSH_NEEDED))
.WillOnce(Return(NS_OK));
EXPECT_CALL(*writer, IsWritingComplete()).Times(0);
// Test
EXPECT_EQ(muxer.SetMetadata(
nsTArray<RefPtr<TrackMetadataBase>>({opusMeta, vp8Meta})),
NS_OK);
muxer.AddEncodedAudioFrame(a0);
muxer.AddEncodedVideoFrame(v0);
muxer.AddEncodedVideoFrame(v50);
muxer.VideoEndOfStream();
muxer.AddEncodedAudioFrame(a48);
muxer.AudioEndOfStream();
nsTArray<nsTArray<uint8_t>> buffers;
EXPECT_EQ(muxer.GetData(&buffers), NS_OK);
}

View File

@ -143,8 +143,8 @@ TEST(VP8VideoTrackEncoder, FrameEncode)
encoder.AdvanceCurrentTime(now + TimeDuration::FromSeconds(images.Length()));
// Pull Encoded Data back from encoder.
nsTArray<RefPtr<EncodedFrame>> frames;
EXPECT_TRUE(NS_SUCCEEDED(encoder.GetEncodedTrack(frames)));
EncodedFrameContainer container;
EXPECT_TRUE(NS_SUCCEEDED(encoder.GetEncodedTrack(container)));
}
// Test that encoding a single frame gives useful output.
@ -165,20 +165,21 @@ TEST(VP8VideoTrackEncoder, SingleFrameEncode)
encoder.AdvanceCurrentTime(now + TimeDuration::FromSeconds(0.5));
encoder.NotifyEndOfStream();
nsTArray<RefPtr<EncodedFrame>> frames;
ASSERT_TRUE(NS_SUCCEEDED(encoder.GetEncodedTrack(frames)));
EncodedFrameContainer container;
ASSERT_TRUE(NS_SUCCEEDED(encoder.GetEncodedTrack(container)));
EXPECT_TRUE(encoder.IsEncodingComplete());
// Read out encoded data, and verify.
const nsTArray<RefPtr<EncodedFrame>>& frames = container.GetEncodedFrames();
const size_t oneElement = 1;
ASSERT_EQ(oneElement, frames.Length());
EXPECT_EQ(EncodedFrame::VP8_I_FRAME, frames[0]->mFrameType)
EXPECT_EQ(EncodedFrame::VP8_I_FRAME, frames[0]->GetFrameType())
<< "We only have one frame, so it should be a keyframe";
const uint64_t halfSecond = PR_USEC_PER_SEC / 2;
EXPECT_EQ(halfSecond, frames[0]->mDuration);
EXPECT_EQ(halfSecond, frames[0]->GetDuration());
}
// Test that encoding a couple of identical images gives useful output.
@ -203,15 +204,15 @@ TEST(VP8VideoTrackEncoder, SameFrameEncode)
encoder.AdvanceCurrentTime(now + TimeDuration::FromSeconds(1.5));
encoder.NotifyEndOfStream();
nsTArray<RefPtr<EncodedFrame>> frames;
ASSERT_TRUE(NS_SUCCEEDED(encoder.GetEncodedTrack(frames)));
EncodedFrameContainer container;
ASSERT_TRUE(NS_SUCCEEDED(encoder.GetEncodedTrack(container)));
EXPECT_TRUE(encoder.IsEncodingComplete());
// Verify total duration being 1.5s.
uint64_t totalDuration = 0;
for (auto& frame : frames) {
totalDuration += frame->mDuration;
for (auto& frame : container.GetEncodedFrames()) {
totalDuration += frame->GetDuration();
}
const uint64_t oneAndAHalf = (PR_USEC_PER_SEC / 2) * 3;
EXPECT_EQ(oneAndAHalf, totalDuration);
@ -239,15 +240,15 @@ TEST(VP8VideoTrackEncoder, SkippedFrames)
encoder.AdvanceCurrentTime(now + TimeDuration::FromMilliseconds(100));
encoder.NotifyEndOfStream();
nsTArray<RefPtr<EncodedFrame>> frames;
ASSERT_TRUE(NS_SUCCEEDED(encoder.GetEncodedTrack(frames)));
EncodedFrameContainer container;
ASSERT_TRUE(NS_SUCCEEDED(encoder.GetEncodedTrack(container)));
EXPECT_TRUE(encoder.IsEncodingComplete());
// Verify total duration being 100 * 1ms = 100ms.
uint64_t totalDuration = 0;
for (auto& frame : frames) {
totalDuration += frame->mDuration;
for (auto& frame : container.GetEncodedFrames()) {
totalDuration += frame->GetDuration();
}
const uint64_t hundredMillis = PR_USEC_PER_SEC / 10;
EXPECT_EQ(hundredMillis, totalDuration);
@ -281,15 +282,15 @@ TEST(VP8VideoTrackEncoder, RoundingErrorFramesEncode)
encoder.AdvanceCurrentTime(now + TimeDuration::FromSeconds(1));
encoder.NotifyEndOfStream();
nsTArray<RefPtr<EncodedFrame>> frames;
ASSERT_TRUE(NS_SUCCEEDED(encoder.GetEncodedTrack(frames)));
EncodedFrameContainer container;
ASSERT_TRUE(NS_SUCCEEDED(encoder.GetEncodedTrack(container)));
EXPECT_TRUE(encoder.IsEncodingComplete());
// Verify total duration being 1s.
uint64_t totalDuration = 0;
for (auto& frame : frames) {
totalDuration += frame->mDuration;
for (auto& frame : container.GetEncodedFrames()) {
totalDuration += frame->GetDuration();
}
const uint64_t oneSecond = PR_USEC_PER_SEC;
EXPECT_EQ(oneSecond, totalDuration);
@ -318,8 +319,8 @@ TEST(VP8VideoTrackEncoder, TimestampFrameEncode)
encoder.AdvanceCurrentTime(now + TimeDuration::FromSeconds(0.3));
encoder.NotifyEndOfStream();
nsTArray<RefPtr<EncodedFrame>> frames;
ASSERT_TRUE(NS_SUCCEEDED(encoder.GetEncodedTrack(frames)));
EncodedFrameContainer container;
ASSERT_TRUE(NS_SUCCEEDED(encoder.GetEncodedTrack(container)));
EXPECT_TRUE(encoder.IsEncodingComplete());
@ -330,9 +331,9 @@ TEST(VP8VideoTrackEncoder, TimestampFrameEncode)
(PR_USEC_PER_SEC / 10)};
uint64_t totalDuration = 0;
size_t i = 0;
for (auto& frame : frames) {
EXPECT_EQ(expectedDurations[i++], frame->mDuration);
totalDuration += frame->mDuration;
for (auto& frame : container.GetEncodedFrames()) {
EXPECT_EQ(expectedDurations[i++], frame->GetDuration());
totalDuration += frame->GetDuration();
}
const uint64_t pointThree = (PR_USEC_PER_SEC / 10) * 3;
EXPECT_EQ(pointThree, totalDuration);
@ -367,8 +368,8 @@ TEST(VP8VideoTrackEncoder, DriftingFrameEncode)
encoder.AdvanceCurrentTime(now + TimeDuration::FromSeconds(0.3));
encoder.NotifyEndOfStream();
nsTArray<RefPtr<EncodedFrame>> frames;
ASSERT_TRUE(NS_SUCCEEDED(encoder.GetEncodedTrack(frames)));
EncodedFrameContainer container;
ASSERT_TRUE(NS_SUCCEEDED(encoder.GetEncodedTrack(container)));
EXPECT_TRUE(encoder.IsEncodingComplete());
@ -379,9 +380,9 @@ TEST(VP8VideoTrackEncoder, DriftingFrameEncode)
(PR_USEC_PER_SEC / 10) * 2};
uint64_t totalDuration = 0;
size_t i = 0;
for (auto& frame : frames) {
EXPECT_EQ(expectedDurations[i++], frame->mDuration);
totalDuration += frame->mDuration;
for (auto& frame : container.GetEncodedFrames()) {
EXPECT_EQ(expectedDurations[i++], frame->GetDuration());
totalDuration += frame->GetDuration();
}
const uint64_t pointSix = (PR_USEC_PER_SEC / 10) * 6;
EXPECT_EQ(pointSix, totalDuration);
@ -432,18 +433,18 @@ TEST(VP8VideoTrackEncoder, Suspended)
encoder.NotifyEndOfStream();
nsTArray<RefPtr<EncodedFrame>> frames;
ASSERT_TRUE(NS_SUCCEEDED(encoder.GetEncodedTrack(frames)));
EncodedFrameContainer container;
ASSERT_TRUE(NS_SUCCEEDED(encoder.GetEncodedTrack(container)));
EXPECT_TRUE(encoder.IsEncodingComplete());
// Verify that we have two encoded frames and a total duration of 0.2s.
const uint64_t two = 2;
EXPECT_EQ(two, frames.Length());
EXPECT_EQ(two, container.GetEncodedFrames().Length());
uint64_t totalDuration = 0;
for (auto& frame : frames) {
totalDuration += frame->mDuration;
for (auto& frame : container.GetEncodedFrames()) {
totalDuration += frame->GetDuration();
}
const uint64_t pointTwo = (PR_USEC_PER_SEC / 10) * 2;
EXPECT_EQ(pointTwo, totalDuration);
@ -482,18 +483,18 @@ TEST(VP8VideoTrackEncoder, SuspendedUntilEnd)
encoder.NotifyEndOfStream();
nsTArray<RefPtr<EncodedFrame>> frames;
ASSERT_TRUE(NS_SUCCEEDED(encoder.GetEncodedTrack(frames)));
EncodedFrameContainer container;
ASSERT_TRUE(NS_SUCCEEDED(encoder.GetEncodedTrack(container)));
EXPECT_TRUE(encoder.IsEncodingComplete());
// Verify that we have one encoded frames and a total duration of 0.1s.
const uint64_t one = 1;
EXPECT_EQ(one, frames.Length());
EXPECT_EQ(one, container.GetEncodedFrames().Length());
uint64_t totalDuration = 0;
for (auto& frame : frames) {
totalDuration += frame->mDuration;
for (auto& frame : container.GetEncodedFrames()) {
totalDuration += frame->GetDuration();
}
const uint64_t pointOne = PR_USEC_PER_SEC / 10;
EXPECT_EQ(pointOne, totalDuration);
@ -521,14 +522,14 @@ TEST(VP8VideoTrackEncoder, AlwaysSuspended)
encoder.NotifyEndOfStream();
nsTArray<RefPtr<EncodedFrame>> frames;
ASSERT_TRUE(NS_SUCCEEDED(encoder.GetEncodedTrack(frames)));
EncodedFrameContainer container;
ASSERT_TRUE(NS_SUCCEEDED(encoder.GetEncodedTrack(container)));
EXPECT_TRUE(encoder.IsEncodingComplete());
// Verify that we have no encoded frames.
const uint64_t none = 0;
EXPECT_EQ(none, frames.Length());
EXPECT_EQ(none, container.GetEncodedFrames().Length());
}
// Test that encoding a track that is suspended in the beginning works.
@ -565,18 +566,18 @@ TEST(VP8VideoTrackEncoder, SuspendedBeginning)
encoder.NotifyEndOfStream();
nsTArray<RefPtr<EncodedFrame>> frames;
ASSERT_TRUE(NS_SUCCEEDED(encoder.GetEncodedTrack(frames)));
EncodedFrameContainer container;
ASSERT_TRUE(NS_SUCCEEDED(encoder.GetEncodedTrack(container)));
EXPECT_TRUE(encoder.IsEncodingComplete());
// Verify that we have one encoded frames and a total duration of 0.1s.
const uint64_t one = 1;
EXPECT_EQ(one, frames.Length());
EXPECT_EQ(one, container.GetEncodedFrames().Length());
uint64_t totalDuration = 0;
for (auto& frame : frames) {
totalDuration += frame->mDuration;
for (auto& frame : container.GetEncodedFrames()) {
totalDuration += frame->GetDuration();
}
const uint64_t half = PR_USEC_PER_SEC / 2;
EXPECT_EQ(half, totalDuration);
@ -618,18 +619,18 @@ TEST(VP8VideoTrackEncoder, SuspendedOverlap)
encoder.AdvanceCurrentTime(now + TimeDuration::FromSeconds(2));
encoder.NotifyEndOfStream();
nsTArray<RefPtr<EncodedFrame>> frames;
ASSERT_TRUE(NS_SUCCEEDED(encoder.GetEncodedTrack(frames)));
EncodedFrameContainer container;
ASSERT_TRUE(NS_SUCCEEDED(encoder.GetEncodedTrack(container)));
EXPECT_TRUE(encoder.IsEncodingComplete());
// Verify that we have two encoded frames and a total duration of 0.1s.
const uint64_t two = 2;
ASSERT_EQ(two, frames.Length());
ASSERT_EQ(two, container.GetEncodedFrames().Length());
const uint64_t pointFive = (PR_USEC_PER_SEC / 10) * 5;
EXPECT_EQ(pointFive, frames[0]->mDuration);
EXPECT_EQ(pointFive, container.GetEncodedFrames()[0]->GetDuration());
const uint64_t pointSeven = (PR_USEC_PER_SEC / 10) * 7;
EXPECT_EQ(pointSeven, frames[1]->mDuration);
EXPECT_EQ(pointSeven, container.GetEncodedFrames()[1]->GetDuration());
}
// Test that ending a track in the middle of already pushed data works.
@ -650,14 +651,14 @@ TEST(VP8VideoTrackEncoder, PrematureEnding)
encoder.AdvanceCurrentTime(now + TimeDuration::FromSeconds(0.5));
encoder.NotifyEndOfStream();
nsTArray<RefPtr<EncodedFrame>> frames;
ASSERT_TRUE(NS_SUCCEEDED(encoder.GetEncodedTrack(frames)));
EncodedFrameContainer container;
ASSERT_TRUE(NS_SUCCEEDED(encoder.GetEncodedTrack(container)));
EXPECT_TRUE(encoder.IsEncodingComplete());
uint64_t totalDuration = 0;
for (auto& frame : frames) {
totalDuration += frame->mDuration;
for (auto& frame : container.GetEncodedFrames()) {
totalDuration += frame->GetDuration();
}
const uint64_t half = PR_USEC_PER_SEC / 2;
EXPECT_EQ(half, totalDuration);
@ -682,14 +683,14 @@ TEST(VP8VideoTrackEncoder, DelayedStart)
encoder.AdvanceCurrentTime(now + TimeDuration::FromSeconds(1));
encoder.NotifyEndOfStream();
nsTArray<RefPtr<EncodedFrame>> frames;
ASSERT_TRUE(NS_SUCCEEDED(encoder.GetEncodedTrack(frames)));
EncodedFrameContainer container;
ASSERT_TRUE(NS_SUCCEEDED(encoder.GetEncodedTrack(container)));
EXPECT_TRUE(encoder.IsEncodingComplete());
uint64_t totalDuration = 0;
for (auto& frame : frames) {
totalDuration += frame->mDuration;
for (auto& frame : container.GetEncodedFrames()) {
totalDuration += frame->GetDuration();
}
const uint64_t half = PR_USEC_PER_SEC / 2;
EXPECT_EQ(half, totalDuration);
@ -715,14 +716,14 @@ TEST(VP8VideoTrackEncoder, DelayedStartOtherEventOrder)
encoder.AdvanceCurrentTime(now + TimeDuration::FromSeconds(1));
encoder.NotifyEndOfStream();
nsTArray<RefPtr<EncodedFrame>> frames;
ASSERT_TRUE(NS_SUCCEEDED(encoder.GetEncodedTrack(frames)));
EncodedFrameContainer container;
ASSERT_TRUE(NS_SUCCEEDED(encoder.GetEncodedTrack(container)));
EXPECT_TRUE(encoder.IsEncodingComplete());
uint64_t totalDuration = 0;
for (auto& frame : frames) {
totalDuration += frame->mDuration;
for (auto& frame : container.GetEncodedFrames()) {
totalDuration += frame->GetDuration();
}
const uint64_t half = PR_USEC_PER_SEC / 2;
EXPECT_EQ(half, totalDuration);
@ -747,14 +748,14 @@ TEST(VP8VideoTrackEncoder, VeryDelayedStart)
encoder.AdvanceCurrentTime(now + TimeDuration::FromSeconds(10.5));
encoder.NotifyEndOfStream();
nsTArray<RefPtr<EncodedFrame>> frames;
ASSERT_TRUE(NS_SUCCEEDED(encoder.GetEncodedTrack(frames)));
EncodedFrameContainer container;
ASSERT_TRUE(NS_SUCCEEDED(encoder.GetEncodedTrack(container)));
EXPECT_TRUE(encoder.IsEncodingComplete());
uint64_t totalDuration = 0;
for (auto& frame : frames) {
totalDuration += frame->mDuration;
for (auto& frame : container.GetEncodedFrames()) {
totalDuration += frame->GetDuration();
}
const uint64_t half = PR_USEC_PER_SEC / 2;
EXPECT_EQ(half, totalDuration);
@ -784,34 +785,34 @@ TEST(VP8VideoTrackEncoder, LongFramesReEncoded)
{
encoder.AdvanceCurrentTime(now + TimeDuration::FromSeconds(1.5));
nsTArray<RefPtr<EncodedFrame>> frames;
ASSERT_TRUE(NS_SUCCEEDED(encoder.GetEncodedTrack(frames)));
EncodedFrameContainer container;
ASSERT_TRUE(NS_SUCCEEDED(encoder.GetEncodedTrack(container)));
EXPECT_FALSE(encoder.IsEncodingComplete());
uint64_t totalDuration = 0;
for (auto& frame : frames) {
totalDuration += frame->mDuration;
for (auto& frame : container.GetEncodedFrames()) {
totalDuration += frame->GetDuration();
}
const uint64_t oneSec = PR_USEC_PER_SEC;
EXPECT_EQ(oneSec, totalDuration);
EXPECT_EQ(1U, frames.Length());
EXPECT_EQ(1U, container.GetEncodedFrames().Length());
}
{
encoder.AdvanceCurrentTime(now + TimeDuration::FromSeconds(11));
encoder.NotifyEndOfStream();
nsTArray<RefPtr<EncodedFrame>> frames;
ASSERT_TRUE(NS_SUCCEEDED(encoder.GetEncodedTrack(frames)));
EncodedFrameContainer container;
ASSERT_TRUE(NS_SUCCEEDED(encoder.GetEncodedTrack(container)));
EXPECT_TRUE(encoder.IsEncodingComplete());
uint64_t totalDuration = 0;
for (auto& frame : frames) {
totalDuration += frame->mDuration;
for (auto& frame : container.GetEncodedFrames()) {
totalDuration += frame->GetDuration();
}
const uint64_t tenSec = PR_USEC_PER_SEC * 10;
EXPECT_EQ(tenSec, totalDuration);
EXPECT_EQ(10U, frames.Length());
EXPECT_EQ(10U, container.GetEncodedFrames().Length());
}
}
@ -852,36 +853,37 @@ TEST(VP8VideoTrackEncoder, ShortKeyFrameInterval)
encoder.AdvanceCurrentTime(now + TimeDuration::FromSeconds(1.2));
encoder.NotifyEndOfStream();
nsTArray<RefPtr<EncodedFrame>> frames;
ASSERT_TRUE(NS_SUCCEEDED(encoder.GetEncodedTrack(frames)));
EncodedFrameContainer container;
ASSERT_TRUE(NS_SUCCEEDED(encoder.GetEncodedTrack(container)));
EXPECT_TRUE(encoder.IsEncodingComplete());
const nsTArray<RefPtr<EncodedFrame>>& frames = container.GetEncodedFrames();
ASSERT_EQ(6UL, frames.Length());
// [0, 400ms)
EXPECT_EQ(PR_USEC_PER_SEC / 1000 * 400UL, frames[0]->mDuration);
EXPECT_EQ(EncodedFrame::VP8_I_FRAME, frames[0]->mFrameType);
EXPECT_EQ(PR_USEC_PER_SEC / 1000 * 400UL, frames[0]->GetDuration());
EXPECT_EQ(EncodedFrame::VP8_I_FRAME, frames[0]->GetFrameType());
// [400ms, 600ms)
EXPECT_EQ(PR_USEC_PER_SEC / 1000 * 200UL, frames[1]->mDuration);
EXPECT_EQ(EncodedFrame::VP8_P_FRAME, frames[1]->mFrameType);
EXPECT_EQ(PR_USEC_PER_SEC / 1000 * 200UL, frames[1]->GetDuration());
EXPECT_EQ(EncodedFrame::VP8_P_FRAME, frames[1]->GetFrameType());
// [600ms, 750ms)
EXPECT_EQ(PR_USEC_PER_SEC / 1000 * 150UL, frames[2]->mDuration);
EXPECT_EQ(EncodedFrame::VP8_I_FRAME, frames[2]->mFrameType);
EXPECT_EQ(PR_USEC_PER_SEC / 1000 * 150UL, frames[2]->GetDuration());
EXPECT_EQ(EncodedFrame::VP8_I_FRAME, frames[2]->GetFrameType());
// [750ms, 900ms)
EXPECT_EQ(PR_USEC_PER_SEC / 1000 * 150UL, frames[3]->mDuration);
EXPECT_EQ(EncodedFrame::VP8_P_FRAME, frames[3]->mFrameType);
EXPECT_EQ(PR_USEC_PER_SEC / 1000 * 150UL, frames[3]->GetDuration());
EXPECT_EQ(EncodedFrame::VP8_P_FRAME, frames[3]->GetFrameType());
// [900ms, 1100ms)
EXPECT_EQ(PR_USEC_PER_SEC / 1000 * 200UL, frames[4]->mDuration);
EXPECT_EQ(EncodedFrame::VP8_P_FRAME, frames[4]->mFrameType);
EXPECT_EQ(PR_USEC_PER_SEC / 1000 * 200UL, frames[4]->GetDuration());
EXPECT_EQ(EncodedFrame::VP8_P_FRAME, frames[4]->GetFrameType());
// [1100ms, 1200ms)
EXPECT_EQ(PR_USEC_PER_SEC / 1000 * 100UL, frames[5]->mDuration);
EXPECT_EQ(EncodedFrame::VP8_I_FRAME, frames[5]->mFrameType);
EXPECT_EQ(PR_USEC_PER_SEC / 1000 * 100UL, frames[5]->GetDuration());
EXPECT_EQ(EncodedFrame::VP8_I_FRAME, frames[5]->GetFrameType());
}
// Test that an encoding with a defined key frame interval encodes keyframes
@ -921,36 +923,37 @@ TEST(VP8VideoTrackEncoder, LongKeyFrameInterval)
encoder.AdvanceCurrentTime(now + TimeDuration::FromSeconds(2.2));
encoder.NotifyEndOfStream();
nsTArray<RefPtr<EncodedFrame>> frames;
ASSERT_TRUE(NS_SUCCEEDED(encoder.GetEncodedTrack(frames)));
EncodedFrameContainer container;
ASSERT_TRUE(NS_SUCCEEDED(encoder.GetEncodedTrack(container)));
EXPECT_TRUE(encoder.IsEncodingComplete());
const nsTArray<RefPtr<EncodedFrame>>& frames = container.GetEncodedFrames();
ASSERT_EQ(6UL, frames.Length());
// [0, 600ms)
EXPECT_EQ(PR_USEC_PER_SEC / 1000 * 600UL, frames[0]->mDuration);
EXPECT_EQ(EncodedFrame::VP8_I_FRAME, frames[0]->mFrameType);
EXPECT_EQ(PR_USEC_PER_SEC / 1000 * 600UL, frames[0]->GetDuration());
EXPECT_EQ(EncodedFrame::VP8_I_FRAME, frames[0]->GetFrameType());
// [600ms, 900ms)
EXPECT_EQ(PR_USEC_PER_SEC / 1000 * 300UL, frames[1]->mDuration);
EXPECT_EQ(EncodedFrame::VP8_P_FRAME, frames[1]->mFrameType);
EXPECT_EQ(PR_USEC_PER_SEC / 1000 * 300UL, frames[1]->GetDuration());
EXPECT_EQ(EncodedFrame::VP8_P_FRAME, frames[1]->GetFrameType());
// [900ms, 1100ms)
EXPECT_EQ(PR_USEC_PER_SEC / 1000 * 200UL, frames[2]->mDuration);
EXPECT_EQ(EncodedFrame::VP8_P_FRAME, frames[2]->mFrameType);
EXPECT_EQ(PR_USEC_PER_SEC / 1000 * 200UL, frames[2]->GetDuration());
EXPECT_EQ(EncodedFrame::VP8_P_FRAME, frames[2]->GetFrameType());
// [1100ms, 1900ms)
EXPECT_EQ(PR_USEC_PER_SEC / 1000 * 800UL, frames[3]->mDuration);
EXPECT_EQ(EncodedFrame::VP8_I_FRAME, frames[3]->mFrameType);
EXPECT_EQ(PR_USEC_PER_SEC / 1000 * 800UL, frames[3]->GetDuration());
EXPECT_EQ(EncodedFrame::VP8_I_FRAME, frames[3]->GetFrameType());
// [1900ms, 2100ms)
EXPECT_EQ(PR_USEC_PER_SEC / 1000 * 200UL, frames[4]->mDuration);
EXPECT_EQ(EncodedFrame::VP8_P_FRAME, frames[4]->mFrameType);
EXPECT_EQ(PR_USEC_PER_SEC / 1000 * 200UL, frames[4]->GetDuration());
EXPECT_EQ(EncodedFrame::VP8_P_FRAME, frames[4]->GetFrameType());
// [2100ms, 2200ms)
EXPECT_EQ(PR_USEC_PER_SEC / 1000 * 100UL, frames[5]->mDuration);
EXPECT_EQ(EncodedFrame::VP8_I_FRAME, frames[5]->mFrameType);
EXPECT_EQ(PR_USEC_PER_SEC / 1000 * 100UL, frames[5]->GetDuration());
EXPECT_EQ(EncodedFrame::VP8_I_FRAME, frames[5]->GetFrameType());
}
// Test that an encoding with no defined key frame interval encodes keyframes
@ -988,36 +991,37 @@ TEST(VP8VideoTrackEncoder, DefaultKeyFrameInterval)
encoder.AdvanceCurrentTime(now + TimeDuration::FromSeconds(2.2));
encoder.NotifyEndOfStream();
nsTArray<RefPtr<EncodedFrame>> frames;
ASSERT_TRUE(NS_SUCCEEDED(encoder.GetEncodedTrack(frames)));
EncodedFrameContainer container;
ASSERT_TRUE(NS_SUCCEEDED(encoder.GetEncodedTrack(container)));
EXPECT_TRUE(encoder.IsEncodingComplete());
const nsTArray<RefPtr<EncodedFrame>>& frames = container.GetEncodedFrames();
ASSERT_EQ(6UL, frames.Length());
// [0, 600ms)
EXPECT_EQ(PR_USEC_PER_SEC / 1000 * 600UL, frames[0]->mDuration);
EXPECT_EQ(EncodedFrame::VP8_I_FRAME, frames[0]->mFrameType);
EXPECT_EQ(PR_USEC_PER_SEC / 1000 * 600UL, frames[0]->GetDuration());
EXPECT_EQ(EncodedFrame::VP8_I_FRAME, frames[0]->GetFrameType());
// [600ms, 900ms)
EXPECT_EQ(PR_USEC_PER_SEC / 1000 * 300UL, frames[1]->mDuration);
EXPECT_EQ(EncodedFrame::VP8_P_FRAME, frames[1]->mFrameType);
EXPECT_EQ(PR_USEC_PER_SEC / 1000 * 300UL, frames[1]->GetDuration());
EXPECT_EQ(EncodedFrame::VP8_P_FRAME, frames[1]->GetFrameType());
// [900ms, 1100ms)
EXPECT_EQ(PR_USEC_PER_SEC / 1000 * 200UL, frames[2]->mDuration);
EXPECT_EQ(EncodedFrame::VP8_P_FRAME, frames[2]->mFrameType);
EXPECT_EQ(PR_USEC_PER_SEC / 1000 * 200UL, frames[2]->GetDuration());
EXPECT_EQ(EncodedFrame::VP8_P_FRAME, frames[2]->GetFrameType());
// [1100ms, 1900ms)
EXPECT_EQ(PR_USEC_PER_SEC / 1000 * 800UL, frames[3]->mDuration);
EXPECT_EQ(EncodedFrame::VP8_I_FRAME, frames[3]->mFrameType);
EXPECT_EQ(PR_USEC_PER_SEC / 1000 * 800UL, frames[3]->GetDuration());
EXPECT_EQ(EncodedFrame::VP8_I_FRAME, frames[3]->GetFrameType());
// [1900ms, 2100ms)
EXPECT_EQ(PR_USEC_PER_SEC / 1000 * 200UL, frames[4]->mDuration);
EXPECT_EQ(EncodedFrame::VP8_P_FRAME, frames[4]->mFrameType);
EXPECT_EQ(PR_USEC_PER_SEC / 1000 * 200UL, frames[4]->GetDuration());
EXPECT_EQ(EncodedFrame::VP8_P_FRAME, frames[4]->GetFrameType());
// [2100ms, 2200ms)
EXPECT_EQ(PR_USEC_PER_SEC / 1000 * 100UL, frames[5]->mDuration);
EXPECT_EQ(EncodedFrame::VP8_I_FRAME, frames[5]->mFrameType);
EXPECT_EQ(PR_USEC_PER_SEC / 1000 * 100UL, frames[5]->GetDuration());
EXPECT_EQ(EncodedFrame::VP8_I_FRAME, frames[5]->GetFrameType());
}
// Test that an encoding where the key frame interval is updated dynamically
@ -1027,7 +1031,7 @@ TEST(VP8VideoTrackEncoder, DynamicKeyFrameIntervalChanges)
TestVP8TrackEncoder encoder;
YUVBufferGenerator generator;
generator.Init(mozilla::gfx::IntSize(640, 480));
nsTArray<RefPtr<EncodedFrame>> frames;
EncodedFrameContainer container;
TimeStamp now = TimeStamp::Now();
// Set keyframe interval to 100ms.
@ -1076,7 +1080,7 @@ TEST(VP8VideoTrackEncoder, DynamicKeyFrameIntervalChanges)
// Advancing 501ms, so the first bit of the frame starting at 500ms is
// included.
encoder.AdvanceCurrentTime(now + TimeDuration::FromMilliseconds(501));
ASSERT_TRUE(NS_SUCCEEDED(encoder.GetEncodedTrack(frames)));
ASSERT_TRUE(NS_SUCCEEDED(encoder.GetEncodedTrack(container)));
{
VideoSegment segment;
@ -1102,7 +1106,7 @@ TEST(VP8VideoTrackEncoder, DynamicKeyFrameIntervalChanges)
// Advancing 2000ms from 501ms to 2501ms
encoder.AdvanceCurrentTime(now + TimeDuration::FromMilliseconds(2501));
ASSERT_TRUE(NS_SUCCEEDED(encoder.GetEncodedTrack(frames)));
ASSERT_TRUE(NS_SUCCEEDED(encoder.GetEncodedTrack(container)));
{
VideoSegment segment;
@ -1126,67 +1130,68 @@ TEST(VP8VideoTrackEncoder, DynamicKeyFrameIntervalChanges)
encoder.NotifyEndOfStream();
ASSERT_TRUE(NS_SUCCEEDED(encoder.GetEncodedTrack(frames)));
ASSERT_TRUE(NS_SUCCEEDED(encoder.GetEncodedTrack(container)));
EXPECT_TRUE(encoder.IsEncodingComplete());
const nsTArray<RefPtr<EncodedFrame>>& frames = container.GetEncodedFrames();
ASSERT_EQ(14UL, frames.Length());
// [0, 100ms)
EXPECT_EQ(PR_USEC_PER_SEC / 1000 * 100UL, frames[0]->mDuration);
EXPECT_EQ(EncodedFrame::VP8_I_FRAME, frames[0]->mFrameType);
EXPECT_EQ(PR_USEC_PER_SEC / 1000 * 100UL, frames[0]->GetDuration());
EXPECT_EQ(EncodedFrame::VP8_I_FRAME, frames[0]->GetFrameType());
// [100ms, 120ms)
EXPECT_EQ(PR_USEC_PER_SEC / 1000 * 20UL, frames[1]->mDuration);
EXPECT_EQ(EncodedFrame::VP8_I_FRAME, frames[1]->mFrameType);
EXPECT_EQ(PR_USEC_PER_SEC / 1000 * 20UL, frames[1]->GetDuration());
EXPECT_EQ(EncodedFrame::VP8_I_FRAME, frames[1]->GetFrameType());
// [120ms, 130ms)
EXPECT_EQ(PR_USEC_PER_SEC / 1000 * 10UL, frames[2]->mDuration);
EXPECT_EQ(EncodedFrame::VP8_P_FRAME, frames[2]->mFrameType);
EXPECT_EQ(PR_USEC_PER_SEC / 1000 * 10UL, frames[2]->GetDuration());
EXPECT_EQ(EncodedFrame::VP8_P_FRAME, frames[2]->GetFrameType());
// [130ms, 200ms)
EXPECT_EQ(PR_USEC_PER_SEC / 1000 * 70UL, frames[3]->mDuration);
EXPECT_EQ(EncodedFrame::VP8_P_FRAME, frames[3]->mFrameType);
EXPECT_EQ(PR_USEC_PER_SEC / 1000 * 70UL, frames[3]->GetDuration());
EXPECT_EQ(EncodedFrame::VP8_P_FRAME, frames[3]->GetFrameType());
// [200ms, 300ms)
EXPECT_EQ(PR_USEC_PER_SEC / 1000 * 100UL, frames[4]->mDuration);
EXPECT_EQ(EncodedFrame::VP8_I_FRAME, frames[4]->mFrameType);
EXPECT_EQ(PR_USEC_PER_SEC / 1000 * 100UL, frames[4]->GetDuration());
EXPECT_EQ(EncodedFrame::VP8_I_FRAME, frames[4]->GetFrameType());
// [300ms, 500ms)
EXPECT_EQ(PR_USEC_PER_SEC / 1000 * 200UL, frames[5]->mDuration);
EXPECT_EQ(EncodedFrame::VP8_I_FRAME, frames[5]->mFrameType);
EXPECT_EQ(PR_USEC_PER_SEC / 1000 * 200UL, frames[5]->GetDuration());
EXPECT_EQ(EncodedFrame::VP8_I_FRAME, frames[5]->GetFrameType());
// [500ms, 1300ms)
EXPECT_EQ(PR_USEC_PER_SEC / 1000 * 800UL, frames[6]->mDuration);
EXPECT_EQ(EncodedFrame::VP8_P_FRAME, frames[6]->mFrameType);
EXPECT_EQ(PR_USEC_PER_SEC / 1000 * 800UL, frames[6]->GetDuration());
EXPECT_EQ(EncodedFrame::VP8_P_FRAME, frames[6]->GetFrameType());
// [1300ms, 1400ms)
EXPECT_EQ(PR_USEC_PER_SEC / 1000 * 100UL, frames[7]->mDuration);
EXPECT_EQ(EncodedFrame::VP8_I_FRAME, frames[7]->mFrameType);
EXPECT_EQ(PR_USEC_PER_SEC / 1000 * 100UL, frames[7]->GetDuration());
EXPECT_EQ(EncodedFrame::VP8_I_FRAME, frames[7]->GetFrameType());
// [1400ms, 2400ms)
EXPECT_EQ(PR_USEC_PER_SEC / 1000 * 1000UL, frames[8]->mDuration);
EXPECT_EQ(EncodedFrame::VP8_P_FRAME, frames[8]->mFrameType);
EXPECT_EQ(PR_USEC_PER_SEC / 1000 * 1000UL, frames[8]->GetDuration());
EXPECT_EQ(EncodedFrame::VP8_P_FRAME, frames[8]->GetFrameType());
// [2400ms, 2500ms)
EXPECT_EQ(PR_USEC_PER_SEC / 1000 * 100UL, frames[9]->mDuration);
EXPECT_EQ(EncodedFrame::VP8_I_FRAME, frames[9]->mFrameType);
EXPECT_EQ(PR_USEC_PER_SEC / 1000 * 100UL, frames[9]->GetDuration());
EXPECT_EQ(EncodedFrame::VP8_I_FRAME, frames[9]->GetFrameType());
// [2500ms, 2600ms)
EXPECT_EQ(PR_USEC_PER_SEC / 1000 * 100UL, frames[10]->mDuration);
EXPECT_EQ(EncodedFrame::VP8_P_FRAME, frames[10]->mFrameType);
EXPECT_EQ(PR_USEC_PER_SEC / 1000 * 100UL, frames[10]->GetDuration());
EXPECT_EQ(EncodedFrame::VP8_P_FRAME, frames[10]->GetFrameType());
// [2600ms, 2800ms)
EXPECT_EQ(PR_USEC_PER_SEC / 1000 * 200UL, frames[11]->mDuration);
EXPECT_EQ(EncodedFrame::VP8_I_FRAME, frames[11]->mFrameType);
EXPECT_EQ(PR_USEC_PER_SEC / 1000 * 200UL, frames[11]->GetDuration());
EXPECT_EQ(EncodedFrame::VP8_I_FRAME, frames[11]->GetFrameType());
// [2800ms, 2900ms)
EXPECT_EQ(PR_USEC_PER_SEC / 1000 * 100UL, frames[12]->mDuration);
EXPECT_EQ(EncodedFrame::VP8_I_FRAME, frames[12]->mFrameType);
EXPECT_EQ(PR_USEC_PER_SEC / 1000 * 100UL, frames[12]->GetDuration());
EXPECT_EQ(EncodedFrame::VP8_I_FRAME, frames[12]->GetFrameType());
// [2900ms, 3000ms)
EXPECT_EQ(PR_USEC_PER_SEC / 1000 * 100UL, frames[13]->mDuration);
EXPECT_EQ(EncodedFrame::VP8_P_FRAME, frames[13]->mFrameType);
EXPECT_EQ(PR_USEC_PER_SEC / 1000 * 100UL, frames[13]->GetDuration());
EXPECT_EQ(EncodedFrame::VP8_P_FRAME, frames[13]->GetFrameType());
}
// Test that an encoding which is disabled on a frame timestamp encodes
@ -1196,7 +1201,7 @@ TEST(VP8VideoTrackEncoder, DisableOnFrameTime)
TestVP8TrackEncoder encoder;
YUVBufferGenerator generator;
generator.Init(mozilla::gfx::IntSize(640, 480));
nsTArray<RefPtr<EncodedFrame>> frames;
EncodedFrameContainer container;
TimeStamp now = TimeStamp::Now();
// Pass a frame in at t=0.
@ -1221,16 +1226,17 @@ TEST(VP8VideoTrackEncoder, DisableOnFrameTime)
encoder.Disable(now + TimeDuration::FromMilliseconds(100));
encoder.AdvanceCurrentTime(now + TimeDuration::FromMilliseconds(200));
encoder.NotifyEndOfStream();
ASSERT_TRUE(NS_SUCCEEDED(encoder.GetEncodedTrack(frames)));
ASSERT_TRUE(NS_SUCCEEDED(encoder.GetEncodedTrack(container)));
EXPECT_TRUE(encoder.IsEncodingComplete());
const nsTArray<RefPtr<EncodedFrame>>& frames = container.GetEncodedFrames();
ASSERT_EQ(2UL, frames.Length());
// [0, 100ms)
EXPECT_EQ(PR_USEC_PER_SEC / 1000 * 100UL, frames[0]->mDuration);
EXPECT_EQ(PR_USEC_PER_SEC / 1000 * 100UL, frames[0]->GetDuration());
// [100ms, 200ms)
EXPECT_EQ(PR_USEC_PER_SEC / 1000 * 100UL, frames[1]->mDuration);
EXPECT_EQ(PR_USEC_PER_SEC / 1000 * 100UL, frames[1]->GetDuration());
}
// Test that an encoding which is disabled between two frame timestamps encodes
@ -1240,7 +1246,7 @@ TEST(VP8VideoTrackEncoder, DisableBetweenFrames)
TestVP8TrackEncoder encoder;
YUVBufferGenerator generator;
generator.Init(mozilla::gfx::IntSize(640, 480));
nsTArray<RefPtr<EncodedFrame>> frames;
EncodedFrameContainer container;
TimeStamp now = TimeStamp::Now();
// Pass a frame in at t=0.
@ -1262,19 +1268,20 @@ TEST(VP8VideoTrackEncoder, DisableBetweenFrames)
encoder.Disable(now + TimeDuration::FromMilliseconds(50));
encoder.AdvanceCurrentTime(now + TimeDuration::FromMilliseconds(200));
encoder.NotifyEndOfStream();
ASSERT_TRUE(NS_SUCCEEDED(encoder.GetEncodedTrack(frames)));
ASSERT_TRUE(NS_SUCCEEDED(encoder.GetEncodedTrack(container)));
EXPECT_TRUE(encoder.IsEncodingComplete());
const nsTArray<RefPtr<EncodedFrame>>& frames = container.GetEncodedFrames();
ASSERT_EQ(3UL, frames.Length());
// [0, 50ms)
EXPECT_EQ(PR_USEC_PER_SEC / 1000 * 50UL, frames[0]->mDuration);
EXPECT_EQ(PR_USEC_PER_SEC / 1000 * 50UL, frames[0]->GetDuration());
// [50ms, 100ms)
EXPECT_EQ(PR_USEC_PER_SEC / 1000 * 50UL, frames[1]->mDuration);
EXPECT_EQ(PR_USEC_PER_SEC / 1000 * 50UL, frames[1]->GetDuration());
// [100ms, 200ms)
EXPECT_EQ(PR_USEC_PER_SEC / 1000 * 100UL, frames[2]->mDuration);
EXPECT_EQ(PR_USEC_PER_SEC / 1000 * 100UL, frames[2]->GetDuration());
}
// Test that an encoding which is enabled on a frame timestamp encodes
@ -1284,7 +1291,7 @@ TEST(VP8VideoTrackEncoder, EnableOnFrameTime)
TestVP8TrackEncoder encoder;
YUVBufferGenerator generator;
generator.Init(mozilla::gfx::IntSize(640, 480));
nsTArray<RefPtr<EncodedFrame>> frames;
EncodedFrameContainer container;
TimeStamp now = TimeStamp::Now();
// Disable the track at t=0.
@ -1311,16 +1318,17 @@ TEST(VP8VideoTrackEncoder, EnableOnFrameTime)
encoder.Enable(now + TimeDuration::FromMilliseconds(100));
encoder.AdvanceCurrentTime(now + TimeDuration::FromMilliseconds(200));
encoder.NotifyEndOfStream();
ASSERT_TRUE(NS_SUCCEEDED(encoder.GetEncodedTrack(frames)));
ASSERT_TRUE(NS_SUCCEEDED(encoder.GetEncodedTrack(container)));
EXPECT_TRUE(encoder.IsEncodingComplete());
const nsTArray<RefPtr<EncodedFrame>>& frames = container.GetEncodedFrames();
ASSERT_EQ(2UL, frames.Length());
// [0, 100ms)
EXPECT_EQ(PR_USEC_PER_SEC / 1000 * 100UL, frames[0]->mDuration);
EXPECT_EQ(PR_USEC_PER_SEC / 1000 * 100UL, frames[0]->GetDuration());
// [100ms, 200ms)
EXPECT_EQ(PR_USEC_PER_SEC / 1000 * 100UL, frames[1]->mDuration);
EXPECT_EQ(PR_USEC_PER_SEC / 1000 * 100UL, frames[1]->GetDuration());
}
// Test that an encoding which is enabled between two frame timestamps encodes
@ -1330,7 +1338,7 @@ TEST(VP8VideoTrackEncoder, EnableBetweenFrames)
TestVP8TrackEncoder encoder;
YUVBufferGenerator generator;
generator.Init(mozilla::gfx::IntSize(640, 480));
nsTArray<RefPtr<EncodedFrame>> frames;
EncodedFrameContainer container;
TimeStamp now = TimeStamp::Now();
// Disable the track at t=0.
@ -1354,19 +1362,20 @@ TEST(VP8VideoTrackEncoder, EnableBetweenFrames)
encoder.Enable(now + TimeDuration::FromMilliseconds(50));
encoder.AdvanceCurrentTime(now + TimeDuration::FromMilliseconds(200));
encoder.NotifyEndOfStream();
ASSERT_TRUE(NS_SUCCEEDED(encoder.GetEncodedTrack(frames)));
ASSERT_TRUE(NS_SUCCEEDED(encoder.GetEncodedTrack(container)));
EXPECT_TRUE(encoder.IsEncodingComplete());
const nsTArray<RefPtr<EncodedFrame>>& frames = container.GetEncodedFrames();
ASSERT_EQ(3UL, frames.Length());
// [0, 50ms)
EXPECT_EQ(PR_USEC_PER_SEC / 1000 * 50UL, frames[0]->mDuration);
EXPECT_EQ(PR_USEC_PER_SEC / 1000 * 50UL, frames[0]->GetDuration());
// [50ms, 100ms)
EXPECT_EQ(PR_USEC_PER_SEC / 1000 * 50UL, frames[1]->mDuration);
EXPECT_EQ(PR_USEC_PER_SEC / 1000 * 50UL, frames[1]->GetDuration());
// [100ms, 200ms)
EXPECT_EQ(PR_USEC_PER_SEC / 1000 * 100UL, frames[2]->mDuration);
EXPECT_EQ(PR_USEC_PER_SEC / 1000 * 100UL, frames[2]->GetDuration());
}
// Test that making time go backwards removes any future frames in the encoder.
@ -1375,7 +1384,7 @@ TEST(VP8VideoTrackEncoder, BackwardsTimeResets)
TestVP8TrackEncoder encoder;
YUVBufferGenerator generator;
generator.Init(mozilla::gfx::IntSize(640, 480));
nsTArray<RefPtr<EncodedFrame>> frames;
EncodedFrameContainer container;
TimeStamp now = TimeStamp::Now();
encoder.SetStartOffset(now);
@ -1422,22 +1431,23 @@ TEST(VP8VideoTrackEncoder, BackwardsTimeResets)
encoder.AdvanceCurrentTime(now + TimeDuration::FromMilliseconds(300));
encoder.NotifyEndOfStream();
ASSERT_TRUE(NS_SUCCEEDED(encoder.GetEncodedTrack(frames)));
ASSERT_TRUE(NS_SUCCEEDED(encoder.GetEncodedTrack(container)));
EXPECT_TRUE(encoder.IsEncodingComplete());
const nsTArray<RefPtr<EncodedFrame>>& frames = container.GetEncodedFrames();
ASSERT_EQ(4UL, frames.Length());
// [0, 100ms)
EXPECT_EQ(PR_USEC_PER_SEC / 1000 * 100UL, frames[0]->mDuration);
EXPECT_EQ(PR_USEC_PER_SEC / 1000 * 100UL, frames[0]->GetDuration());
// [100ms, 150ms)
EXPECT_EQ(PR_USEC_PER_SEC / 1000 * 50UL, frames[1]->mDuration);
EXPECT_EQ(PR_USEC_PER_SEC / 1000 * 50UL, frames[1]->GetDuration());
// [150ms, 250ms)
EXPECT_EQ(PR_USEC_PER_SEC / 1000 * 100UL, frames[2]->mDuration);
EXPECT_EQ(PR_USEC_PER_SEC / 1000 * 100UL, frames[2]->GetDuration());
// [250ms, 300ms)
EXPECT_EQ(PR_USEC_PER_SEC / 1000 * 50UL, frames[3]->mDuration);
EXPECT_EQ(PR_USEC_PER_SEC / 1000 * 50UL, frames[3]->GetDuration());
}
// Test that trying to encode a null image removes any future frames in the
@ -1447,7 +1457,7 @@ TEST(VP8VideoTrackEncoder, NullImageResets)
TestVP8TrackEncoder encoder;
YUVBufferGenerator generator;
generator.Init(mozilla::gfx::IntSize(640, 480));
nsTArray<RefPtr<EncodedFrame>> frames;
EncodedFrameContainer container;
TimeStamp now = TimeStamp::Now();
encoder.SetStartOffset(now);
@ -1494,19 +1504,20 @@ TEST(VP8VideoTrackEncoder, NullImageResets)
encoder.AdvanceCurrentTime(now + TimeDuration::FromMilliseconds(300));
encoder.NotifyEndOfStream();
ASSERT_TRUE(NS_SUCCEEDED(encoder.GetEncodedTrack(frames)));
ASSERT_TRUE(NS_SUCCEEDED(encoder.GetEncodedTrack(container)));
EXPECT_TRUE(encoder.IsEncodingComplete());
const nsTArray<RefPtr<EncodedFrame>>& frames = container.GetEncodedFrames();
ASSERT_EQ(3UL, frames.Length());
// [0, 100ms)
EXPECT_EQ(PR_USEC_PER_SEC / 1000 * 100UL, frames[0]->mDuration);
EXPECT_EQ(PR_USEC_PER_SEC / 1000 * 100UL, frames[0]->GetDuration());
// [100ms, 250ms)
EXPECT_EQ(PR_USEC_PER_SEC / 1000 * 150UL, frames[1]->mDuration);
EXPECT_EQ(PR_USEC_PER_SEC / 1000 * 150UL, frames[1]->GetDuration());
// [250ms, 300ms)
EXPECT_EQ(PR_USEC_PER_SEC / 1000 * 50UL, frames[2]->mDuration);
EXPECT_EQ(PR_USEC_PER_SEC / 1000 * 50UL, frames[2]->GetDuration());
}
// EOS test
@ -1520,8 +1531,8 @@ TEST(VP8VideoTrackEncoder, EncodeComplete)
// Pull Encoded Data back from encoder. Since we have sent
// EOS to encoder, encoder.GetEncodedTrack should return
// NS_OK immidiately.
nsTArray<RefPtr<EncodedFrame>> frames;
EXPECT_TRUE(NS_SUCCEEDED(encoder.GetEncodedTrack(frames)));
EncodedFrameContainer container;
EXPECT_TRUE(NS_SUCCEEDED(encoder.GetEncodedTrack(container)));
EXPECT_TRUE(encoder.IsEncodingComplete());
}

View File

@ -40,30 +40,28 @@ class WebMVP8TrackEncoder : public VP8TrackEncoder {
}
};
static void GetOpusMetadata(int aChannels, int aSampleRate,
TrackRate aTrackRate,
nsTArray<RefPtr<TrackMetadataBase>>& aMeta) {
WebMOpusTrackEncoder opusEncoder(aTrackRate);
EXPECT_TRUE(opusEncoder.TestOpusCreation(aChannels, aSampleRate));
aMeta.AppendElement(opusEncoder.GetMetadata());
}
static void GetVP8Metadata(int32_t aWidth, int32_t aHeight,
int32_t aDisplayWidth, int32_t aDisplayHeight,
TrackRate aTrackRate,
nsTArray<RefPtr<TrackMetadataBase>>& aMeta) {
WebMVP8TrackEncoder vp8Encoder;
EXPECT_TRUE(vp8Encoder.TestVP8Creation(aWidth, aHeight, aDisplayWidth,
aDisplayHeight));
aMeta.AppendElement(vp8Encoder.GetMetadata());
}
const uint64_t FIXED_DURATION = 1000000;
const uint32_t FIXED_FRAMESIZE = 500;
class TestWebMWriter : public WebMWriter {
public:
TestWebMWriter() : WebMWriter(), mTimestamp(0) {}
explicit TestWebMWriter(int aTrackTypes)
: WebMWriter(aTrackTypes), mTimestamp(0) {}
void SetOpusMetadata(int aChannels, int aSampleRate, TrackRate aTrackRate) {
WebMOpusTrackEncoder opusEncoder(aTrackRate);
EXPECT_TRUE(opusEncoder.TestOpusCreation(aChannels, aSampleRate));
RefPtr<TrackMetadataBase> opusMeta = opusEncoder.GetMetadata();
SetMetadata(opusMeta);
}
void SetVP8Metadata(int32_t aWidth, int32_t aHeight, int32_t aDisplayWidth,
int32_t aDisplayHeight, TrackRate aTrackRate) {
WebMVP8TrackEncoder vp8Encoder;
EXPECT_TRUE(vp8Encoder.TestVP8Creation(aWidth, aHeight, aDisplayWidth,
aDisplayHeight));
RefPtr<TrackMetadataBase> vp8Meta = vp8Encoder.GetMetadata();
SetMetadata(vp8Meta);
}
// When we append an I-Frame into WebM muxer, the muxer will treat previous
// data as "a cluster".
@ -71,22 +69,22 @@ class TestWebMWriter : public WebMWriter {
// previous cluster so that we can retrieve data by |GetContainerData|.
void AppendDummyFrame(EncodedFrame::FrameType aFrameType,
uint64_t aDuration) {
nsTArray<RefPtr<EncodedFrame>> encodedVideoData;
EncodedFrameContainer encodedVideoData;
nsTArray<uint8_t> frameData;
RefPtr<EncodedFrame> videoData = new EncodedFrame();
// Create dummy frame data.
frameData.SetLength(FIXED_FRAMESIZE);
videoData->mFrameType = aFrameType;
videoData->mTime = mTimestamp;
videoData->mDuration = aDuration;
videoData->SetFrameType(aFrameType);
videoData->SetTimeStamp(mTimestamp);
videoData->SetDuration(aDuration);
videoData->SwapInFrameData(frameData);
encodedVideoData.AppendElement(videoData);
encodedVideoData.AppendEncodedFrame(videoData);
WriteEncodedTrack(encodedVideoData, 0);
mTimestamp += aDuration;
}
bool HaveValidCluster() {
nsTArray<nsTArray<uint8_t>> encodedBuf;
nsTArray<nsTArray<uint8_t> > encodedBuf;
GetContainerData(&encodedBuf, 0);
return (encodedBuf.Length() > 0) ? true : false;
}
@ -98,32 +96,35 @@ class TestWebMWriter : public WebMWriter {
TEST(WebMWriter, Metadata)
{
TestWebMWriter writer;
TestWebMWriter writer(ContainerWriter::CREATE_AUDIO_TRACK |
ContainerWriter::CREATE_VIDEO_TRACK);
// The output should be empty since we didn't set any metadata in writer.
nsTArray<nsTArray<uint8_t>> encodedBuf;
nsTArray<nsTArray<uint8_t> > encodedBuf;
writer.GetContainerData(&encodedBuf, ContainerWriter::GET_HEADER);
EXPECT_TRUE(encodedBuf.Length() == 0);
writer.GetContainerData(&encodedBuf, ContainerWriter::FLUSH_NEEDED);
EXPECT_TRUE(encodedBuf.Length() == 0);
nsTArray<RefPtr<TrackMetadataBase>> meta;
// Get opus metadata.
// Set opus metadata.
int channel = 1;
int sampleRate = 44100;
TrackRate aTrackRate = 90000;
GetOpusMetadata(channel, sampleRate, aTrackRate, meta);
writer.SetOpusMetadata(channel, sampleRate, aTrackRate);
// Get vp8 metadata
// No output data since we didn't set both audio/video
// metadata in writer.
writer.GetContainerData(&encodedBuf, ContainerWriter::GET_HEADER);
EXPECT_TRUE(encodedBuf.Length() == 0);
writer.GetContainerData(&encodedBuf, ContainerWriter::FLUSH_NEEDED);
EXPECT_TRUE(encodedBuf.Length() == 0);
// Set vp8 metadata
int32_t width = 640;
int32_t height = 480;
int32_t displayWidth = 640;
int32_t displayHeight = 480;
GetVP8Metadata(width, height, displayWidth, displayHeight, aTrackRate, meta);
// Set metadata
writer.SetMetadata(meta);
writer.SetVP8Metadata(width, height, displayWidth, displayHeight, aTrackRate);
writer.GetContainerData(&encodedBuf, ContainerWriter::GET_HEADER);
EXPECT_TRUE(encodedBuf.Length() > 0);
@ -131,22 +132,21 @@ TEST(WebMWriter, Metadata)
TEST(WebMWriter, Cluster)
{
TestWebMWriter writer;
nsTArray<RefPtr<TrackMetadataBase>> meta;
// Get opus metadata.
TestWebMWriter writer(ContainerWriter::CREATE_AUDIO_TRACK |
ContainerWriter::CREATE_VIDEO_TRACK);
// Set opus metadata.
int channel = 1;
int sampleRate = 48000;
TrackRate aTrackRate = 90000;
GetOpusMetadata(channel, sampleRate, aTrackRate, meta);
// Get vp8 metadata
writer.SetOpusMetadata(channel, sampleRate, aTrackRate);
// Set vp8 metadata
int32_t width = 320;
int32_t height = 240;
int32_t displayWidth = 320;
int32_t displayHeight = 240;
GetVP8Metadata(width, height, displayWidth, displayHeight, aTrackRate, meta);
writer.SetMetadata(meta);
writer.SetVP8Metadata(width, height, displayWidth, displayHeight, aTrackRate);
nsTArray<nsTArray<uint8_t>> encodedBuf;
nsTArray<nsTArray<uint8_t> > encodedBuf;
writer.GetContainerData(&encodedBuf, ContainerWriter::GET_HEADER);
EXPECT_TRUE(encodedBuf.Length() > 0);
encodedBuf.Clear();
@ -174,20 +174,19 @@ TEST(WebMWriter, Cluster)
TEST(WebMWriter, FLUSH_NEEDED)
{
TestWebMWriter writer;
nsTArray<RefPtr<TrackMetadataBase>> meta;
// Get opus metadata.
TestWebMWriter writer(ContainerWriter::CREATE_AUDIO_TRACK |
ContainerWriter::CREATE_VIDEO_TRACK);
// Set opus metadata.
int channel = 2;
int sampleRate = 44100;
TrackRate aTrackRate = 100000;
GetOpusMetadata(channel, sampleRate, aTrackRate, meta);
// Get vp8 metadata
writer.SetOpusMetadata(channel, sampleRate, aTrackRate);
// Set vp8 metadata
int32_t width = 176;
int32_t height = 352;
int32_t displayWidth = 176;
int32_t displayHeight = 352;
GetVP8Metadata(width, height, displayWidth, displayHeight, aTrackRate, meta);
writer.SetMetadata(meta);
writer.SetVP8Metadata(width, height, displayWidth, displayHeight, aTrackRate);
// write the first I-Frame.
writer.AppendDummyFrame(EncodedFrame::VP8_I_FRAME, FIXED_DURATION);
@ -200,7 +199,7 @@ TEST(WebMWriter, FLUSH_NEEDED)
// retrieved
EXPECT_FALSE(writer.HaveValidCluster());
nsTArray<nsTArray<uint8_t>> encodedBuf;
nsTArray<nsTArray<uint8_t> > encodedBuf;
// Have data because the flag ContainerWriter::FLUSH_NEEDED
writer.GetContainerData(&encodedBuf, ContainerWriter::FLUSH_NEEDED);
EXPECT_TRUE(encodedBuf.Length() > 0);
@ -295,20 +294,19 @@ static int64_t webm_tell(void* aUserData) {
TEST(WebMWriter, bug970774_aspect_ratio)
{
TestWebMWriter writer;
nsTArray<RefPtr<TrackMetadataBase>> meta;
// Get opus metadata.
TestWebMWriter writer(ContainerWriter::CREATE_AUDIO_TRACK |
ContainerWriter::CREATE_VIDEO_TRACK);
// Set opus metadata.
int channel = 1;
int sampleRate = 44100;
TrackRate aTrackRate = 90000;
GetOpusMetadata(channel, sampleRate, aTrackRate, meta);
writer.SetOpusMetadata(channel, sampleRate, aTrackRate);
// Set vp8 metadata
int32_t width = 640;
int32_t height = 480;
int32_t displayWidth = 1280;
int32_t displayHeight = 960;
GetVP8Metadata(width, height, displayWidth, displayHeight, aTrackRate, meta);
writer.SetMetadata(meta);
writer.SetVP8Metadata(width, height, displayWidth, displayHeight, aTrackRate);
// write the first I-Frame.
writer.AppendDummyFrame(EncodedFrame::VP8_I_FRAME, FIXED_DURATION);
@ -317,7 +315,7 @@ TEST(WebMWriter, bug970774_aspect_ratio)
writer.AppendDummyFrame(EncodedFrame::VP8_I_FRAME, FIXED_DURATION);
// Get the metadata and the first cluster.
nsTArray<nsTArray<uint8_t>> encodedBuf;
nsTArray<nsTArray<uint8_t> > encodedBuf;
writer.GetContainerData(&encodedBuf, 0);
// Flatten the encodedBuf.
WebMioData ioData;

View File

@ -14,7 +14,6 @@ LOCAL_INCLUDES += [
]
UNIFIED_SOURCES += [
'AudioGenerator.cpp',
'MockMediaResource.cpp',
'TestAudioBuffers.cpp',
'TestAudioCallbackDriver.cpp',
@ -38,7 +37,6 @@ UNIFIED_SOURCES += [
'TestMediaSpan.cpp',
'TestMP3Demuxer.cpp',
'TestMP4Demuxer.cpp',
'TestMuxer.cpp',
'TestOpusParser.cpp',
'TestRust.cpp',
'TestTimeUnit.cpp',

View File

@ -1675,6 +1675,4 @@ bool SkeletonState::DecodeHeader(OggPacketPtr aPacket) {
return true;
}
#undef LOG
} // namespace mozilla

View File

@ -1893,5 +1893,5 @@ nsresult OggDemuxer::SeekBisection(TrackInfo::TrackType aType, int64_t aTarget,
}
#undef OGG_DEBUG
#undef SEEK_LOG
#undef SEEK_DEBUG
} // namespace mozilla

View File

@ -6,6 +6,7 @@
#include "prtime.h"
#include "GeckoProfiler.h"
#undef LOG
#define LOG(args, ...)
namespace mozilla {
@ -45,20 +46,22 @@ nsresult OggWriter::Init() {
return (rc == 0) ? NS_OK : NS_ERROR_NOT_INITIALIZED;
}
nsresult OggWriter::WriteEncodedTrack(
const nsTArray<RefPtr<EncodedFrame>>& aData, uint32_t aFlags) {
nsresult OggWriter::WriteEncodedTrack(const EncodedFrameContainer& aData,
uint32_t aFlags) {
AUTO_PROFILER_LABEL("OggWriter::WriteEncodedTrack", OTHER);
uint32_t len = aData.Length();
uint32_t len = aData.GetEncodedFrames().Length();
for (uint32_t i = 0; i < len; i++) {
if (aData[i]->mFrameType != EncodedFrame::OPUS_AUDIO_FRAME) {
if (aData.GetEncodedFrames()[i]->GetFrameType() !=
EncodedFrame::OPUS_AUDIO_FRAME) {
LOG("[OggWriter] wrong encoded data type!");
return NS_ERROR_FAILURE;
}
// only pass END_OF_STREAM on the last frame!
nsresult rv = WriteEncodedData(
aData[i]->GetFrameData(), aData[i]->mDuration,
aData.GetEncodedFrames()[i]->GetFrameData(),
aData.GetEncodedFrames()[i]->GetDuration(),
i < len - 1 ? (aFlags & ~ContainerWriter::END_OF_STREAM) : aFlags);
if (NS_FAILED(rv)) {
LOG("%p Failed to WriteEncodedTrack!", this);
@ -108,7 +111,7 @@ nsresult OggWriter::WriteEncodedData(const nsTArray<uint8_t>& aBuffer,
return NS_OK;
}
void OggWriter::ProduceOggPage(nsTArray<nsTArray<uint8_t>>* aOutputBufs) {
void OggWriter::ProduceOggPage(nsTArray<nsTArray<uint8_t> >* aOutputBufs) {
aOutputBufs->AppendElement();
aOutputBufs->LastElement().SetLength(mOggPage.header_len + mOggPage.body_len);
memcpy(aOutputBufs->LastElement().Elements(), mOggPage.header,
@ -117,7 +120,7 @@ void OggWriter::ProduceOggPage(nsTArray<nsTArray<uint8_t>>* aOutputBufs) {
mOggPage.body, mOggPage.body_len);
}
nsresult OggWriter::GetContainerData(nsTArray<nsTArray<uint8_t>>* aOutputBufs,
nsresult OggWriter::GetContainerData(nsTArray<nsTArray<uint8_t> >* aOutputBufs,
uint32_t aFlags) {
int rc = -1;
AUTO_PROFILER_LABEL("OggWriter::GetContainerData", OTHER);
@ -141,13 +144,12 @@ nsresult OggWriter::GetContainerData(nsTArray<nsTArray<uint8_t>>* aOutputBufs,
rc = ogg_stream_flush(&mOggStreamState, &mOggPage);
NS_ENSURE_TRUE(rc > 0, NS_ERROR_FAILURE);
ProduceOggPage(aOutputBufs);
return NS_OK;
// Force generate a page even if the amount of packet data is not enough.
// Usually do so after a header packet.
ProduceOggPage(aOutputBufs);
}
if (aFlags & ContainerWriter::FLUSH_NEEDED) {
} else if (aFlags & ContainerWriter::FLUSH_NEEDED) {
// rc = 0 means no packet to put into a page, or an internal error.
rc = ogg_stream_flush(&mOggStreamState, &mOggPage);
} else {
@ -162,25 +164,20 @@ nsresult OggWriter::GetContainerData(nsTArray<nsTArray<uint8_t>>* aOutputBufs,
if (aFlags & ContainerWriter::FLUSH_NEEDED) {
mIsWritingComplete = true;
}
// We always return NS_OK here since it's OK to call this without having
// enough data to fill a page. It's the more common case compared to internal
// errors, and we cannot distinguish the two.
return NS_OK;
return (rc > 0) ? NS_OK : NS_ERROR_FAILURE;
}
nsresult OggWriter::SetMetadata(
const nsTArray<RefPtr<TrackMetadataBase>>& aMetadata) {
MOZ_ASSERT(aMetadata.Length() == 1);
MOZ_ASSERT(aMetadata[0]);
nsresult OggWriter::SetMetadata(TrackMetadataBase* aMetadata) {
MOZ_ASSERT(aMetadata);
AUTO_PROFILER_LABEL("OggWriter::SetMetadata", OTHER);
if (aMetadata[0]->GetKind() != TrackMetadataBase::METADATA_OPUS) {
if (aMetadata->GetKind() != TrackMetadataBase::METADATA_OPUS) {
LOG("wrong meta data type!");
return NS_ERROR_FAILURE;
}
// Validate each field of METADATA
mMetadata = static_cast<OpusMetadata*>(aMetadata[0].get());
mMetadata = static_cast<OpusMetadata*>(aMetadata);
if (mMetadata->mIdHeader.Length() == 0) {
LOG("miss mIdHeader!");
return NS_ERROR_FAILURE;
@ -194,5 +191,3 @@ nsresult OggWriter::SetMetadata(
}
} // namespace mozilla
#undef LOG

View File

@ -23,17 +23,14 @@ class OggWriter : public ContainerWriter {
OggWriter();
~OggWriter();
// Write frames into the ogg container. aFlags should be set to END_OF_STREAM
// for the final set of frames.
nsresult WriteEncodedTrack(const nsTArray<RefPtr<EncodedFrame>>& aData,
nsresult WriteEncodedTrack(const EncodedFrameContainer& aData,
uint32_t aFlags = 0) override;
nsresult GetContainerData(nsTArray<nsTArray<uint8_t>>* aOutputBufs,
nsresult GetContainerData(nsTArray<nsTArray<uint8_t> >* aOutputBufs,
uint32_t aFlags = 0) override;
// Check metadata type integrity and reject unacceptable track encoder.
nsresult SetMetadata(
const nsTArray<RefPtr<TrackMetadataBase>>& aMetadata) override;
nsresult SetMetadata(TrackMetadataBase* aMetadata) override;
private:
nsresult Init();
@ -41,7 +38,7 @@ class OggWriter : public ContainerWriter {
nsresult WriteEncodedData(const nsTArray<uint8_t>& aBuffer, int aDuration,
uint32_t aFlags = 0);
void ProduceOggPage(nsTArray<nsTArray<uint8_t>>* aOutputBufs);
void ProduceOggPage(nsTArray<nsTArray<uint8_t> >* aOutputBufs);
// Store the Medatata from track encoder
RefPtr<OpusMetadata> mMetadata;

View File

@ -212,6 +212,4 @@ bool OpusParser::IsValidMapping2ChannelsCount(uint8_t aChannels) {
return val == valInt || valInt * valInt + 2 == aChannels;
}
#undef OPUS_LOG
} // namespace mozilla

View File

@ -56,8 +56,7 @@ function startTest() {
}
totalBlobSize += e.data.size;
ok(totalBlobSize > 0, 'check the totalBlobSize');
is(e.data.type, expectedMimeType, 'blob should have expected mimetype');
is(mMediaRecorder.mimeType, expectedMimeType, 'recorder should have expected mimetype');
is(mMediaRecorder.mimeType, expectedMimeType, 'blob should has mimetype, return ' + mMediaRecorder.mimeType);
if (!stopTriggered) {
mMediaRecorder.stop();
stopTriggered = true;

View File

@ -65,9 +65,7 @@ async function testRecord(source, mimeType) {
const chunks = [];
let {data} = await new Promise(r => recorder.ondataavailable = r);
if (!isOffline) {
is(recorder.state, "recording", "Expected to still be recording");
}
is(recorder.state, "recording", "Expected to still be recording");
is(data.type, recorder.mimeType, "Blob has recorder mimetype");
if (mimeType != "") {
is(data.type, mimeType, "Blob has given mimetype");

View File

@ -38,13 +38,13 @@ function startTest(test, token) {
info('onstart fired successfully');
hasonstart = true;
// On audio only case, we produce audio/ogg as mimeType.
is('audio/ogg', mMediaRecorder.mimeType, "MediaRecorder mimetype as expected");
is('audio/ogg', mMediaRecorder.mimeType, "check the record mimetype return " + mMediaRecorder.mimeType);
mMediaRecorder.requestData();
};
mMediaRecorder.onstop = function() {
info('onstop fired successfully');
ok(hasondataavailable, "should have ondataavailable before onstop");
ok (hasondataavailable, "should have ondataavailable before onstop");
is(mMediaRecorder.state, 'inactive', 'check recording status is inactive');
SimpleTest.finish();
};
@ -53,9 +53,8 @@ function startTest(test, token) {
info('ondataavailable fired successfully');
if (mMediaRecorder.state == 'recording') {
hasondataavailable = true;
ok(hasonstart, "should have had start event first");
is(e.data.type, mMediaRecorder.mimeType,
"blob's mimeType matches the recorder's");
ok(hasonstart, "should has onstart event first");
ok(e.data.size > 0, 'check blob has data');
mMediaRecorder.stop();
}
};

View File

@ -55,15 +55,14 @@ void EbmlComposer::GenerateHeader() {
if (mCodecPrivateData.Length() > 0) {
// Extract the pre-skip from mCodecPrivateData
// then convert it to nanoseconds.
// For more details see
// https://tools.ietf.org/html/rfc7845#section-4.2
uint64_t codecDelay = (uint64_t)LittleEndian::readUint16(
mCodecPrivateData.Elements() + 10) *
PR_NSEC_PER_SEC / 48000;
// Details in OpusTrackEncoder.cpp.
mCodecDelay = (uint64_t)LittleEndian::readUint16(
mCodecPrivateData.Elements() + 10) *
PR_NSEC_PER_SEC / 48000;
// Fixed 80ms, convert into nanoseconds.
uint64_t seekPreRoll = 80 * PR_NSEC_PER_MSEC;
writeAudioTrack(&ebml, 0x2, 0x0, "A_OPUS", mSampleFreq, mChannels,
codecDelay, seekPreRoll,
mCodecDelay, seekPreRoll,
mCodecPrivateData.Elements(),
mCodecPrivateData.Length());
}
@ -115,7 +114,7 @@ void EbmlComposer::WriteSimpleBlock(EncodedFrame* aFrame) {
EbmlGlobal ebml;
ebml.offset = 0;
auto frameType = aFrame->mFrameType;
auto frameType = aFrame->GetFrameType();
const bool isVP8IFrame = (frameType == EncodedFrame::FrameType::VP8_I_FRAME);
const bool isVP8PFrame = (frameType == EncodedFrame::FrameType::VP8_P_FRAME);
const bool isOpus = (frameType == EncodedFrame::FrameType::OPUS_AUDIO_FRAME);
@ -129,7 +128,11 @@ void EbmlComposer::WriteSimpleBlock(EncodedFrame* aFrame) {
return;
}
int64_t timeCode = aFrame->mTime / ((int)PR_USEC_PER_MSEC) - mClusterTimecode;
int64_t timeCode =
aFrame->GetTimeStamp() / ((int)PR_USEC_PER_MSEC) - mClusterTimecode;
if (isOpus) {
timeCode += mCodecDelay / PR_NSEC_PER_MSEC;
}
if (!mHasVideo && timeCode >= FLUSH_AUDIO_ONLY_AFTER_MS) {
MOZ_ASSERT(mHasAudio);
@ -154,11 +157,15 @@ void EbmlComposer::WriteSimpleBlock(EncodedFrame* aFrame) {
mClusterHeaderIndex = mClusters.Length() - 1;
mClusterLengthLoc = ebmlLoc.offset;
// if timeCode didn't under/overflow before, it shouldn't after this
mClusterTimecode = aFrame->mTime / PR_USEC_PER_MSEC;
mClusterTimecode = aFrame->GetTimeStamp() / PR_USEC_PER_MSEC;
Ebml_SerializeUnsigned(&ebml, Timecode, mClusterTimecode);
// Can't under-/overflow now
timeCode = aFrame->mTime / ((int)PR_USEC_PER_MSEC) - mClusterTimecode;
timeCode =
aFrame->GetTimeStamp() / ((int)PR_USEC_PER_MSEC) - mClusterTimecode;
if (isOpus) {
timeCode += mCodecDelay / PR_NSEC_PER_MSEC;
}
mWritingCluster = true;
}

View File

@ -38,8 +38,7 @@ class EbmlComposer {
/*
* Insert media encoded buffer into muxer and it would be package
* into SimpleBlock. If no cluster is opened, new cluster will start for
* writing. Frames passed to this function should already have any codec delay
* applied.
* writing.
*/
void WriteSimpleBlock(EncodedFrame* aFrame);
/*
@ -69,6 +68,8 @@ class EbmlComposer {
uint64_t mClusterLengthLoc = 0;
// Audio codec specific header data.
nsTArray<uint8_t> mCodecPrivateData;
// Codec delay in nanoseconds.
uint64_t mCodecDelay = 0;
// The timecode of the cluster.
uint64_t mClusterTimecode = 0;

View File

@ -1259,6 +1259,6 @@ int64_t WebMTrackDemuxer::GetEvictionOffset(const TimeUnit& aTime) {
return offset;
}
} // namespace mozilla
#undef WEBM_DEBUG
} // namespace mozilla

View File

@ -10,7 +10,8 @@
namespace mozilla {
WebMWriter::WebMWriter() : ContainerWriter() {
WebMWriter::WebMWriter(uint32_t aTrackTypes) : ContainerWriter() {
mMetadataRequiredFlag = aTrackTypes;
mEbmlComposer = new EbmlComposer();
}
@ -18,16 +19,17 @@ WebMWriter::~WebMWriter() {
// Out-of-line dtor so mEbmlComposer nsAutoPtr can delete a complete type.
}
nsresult WebMWriter::WriteEncodedTrack(
const nsTArray<RefPtr<EncodedFrame>>& aData, uint32_t aFlags) {
nsresult WebMWriter::WriteEncodedTrack(const EncodedFrameContainer& aData,
uint32_t aFlags) {
AUTO_PROFILER_LABEL("WebMWriter::WriteEncodedTrack", OTHER);
for (uint32_t i = 0; i < aData.Length(); i++) {
mEbmlComposer->WriteSimpleBlock(aData.ElementAt(i).get());
for (uint32_t i = 0; i < aData.GetEncodedFrames().Length(); i++) {
mEbmlComposer->WriteSimpleBlock(
aData.GetEncodedFrames().ElementAt(i).get());
}
return NS_OK;
}
nsresult WebMWriter::GetContainerData(nsTArray<nsTArray<uint8_t>>* aOutputBufs,
nsresult WebMWriter::GetContainerData(nsTArray<nsTArray<uint8_t> >* aOutputBufs,
uint32_t aFlags) {
AUTO_PROFILER_LABEL("WebMWriter::GetContainerData", OTHER);
mEbmlComposer->ExtractBuffer(aOutputBufs, aFlags);
@ -37,75 +39,40 @@ nsresult WebMWriter::GetContainerData(nsTArray<nsTArray<uint8_t>>* aOutputBufs,
return NS_OK;
}
nsresult WebMWriter::SetMetadata(
const nsTArray<RefPtr<TrackMetadataBase>>& aMetadata) {
nsresult WebMWriter::SetMetadata(TrackMetadataBase* aMetadata) {
MOZ_ASSERT(aMetadata);
AUTO_PROFILER_LABEL("WebMWriter::SetMetadata", OTHER);
MOZ_DIAGNOSTIC_ASSERT(!aMetadata.IsEmpty());
// Integrity checks
bool bad = false;
for (const RefPtr<TrackMetadataBase>& metadata : aMetadata) {
MOZ_ASSERT(metadata);
if (metadata->GetKind() == TrackMetadataBase::METADATA_VP8) {
VP8Metadata* meta = static_cast<VP8Metadata*>(metadata.get());
if (meta->mWidth == 0 || meta->mHeight == 0 || meta->mDisplayWidth == 0 ||
meta->mDisplayHeight == 0) {
bad = true;
}
}
if (metadata->GetKind() == TrackMetadataBase::METADATA_VORBIS) {
VorbisMetadata* meta = static_cast<VorbisMetadata*>(metadata.get());
if (meta->mSamplingFrequency == 0 || meta->mChannels == 0 ||
meta->mData.IsEmpty()) {
bad = true;
}
}
if (metadata->GetKind() == TrackMetadataBase::METADATA_OPUS) {
OpusMetadata* meta = static_cast<OpusMetadata*>(metadata.get());
if (meta->mSamplingFrequency == 0 || meta->mChannels == 0 ||
meta->mIdHeader.IsEmpty()) {
bad = true;
}
}
}
if (bad) {
return NS_ERROR_FAILURE;
if (aMetadata->GetKind() == TrackMetadataBase::METADATA_VP8) {
VP8Metadata* meta = static_cast<VP8Metadata*>(aMetadata);
MOZ_ASSERT(meta, "Cannot find vp8 encoder metadata");
mEbmlComposer->SetVideoConfig(meta->mWidth, meta->mHeight,
meta->mDisplayWidth, meta->mDisplayHeight);
mMetadataRequiredFlag =
mMetadataRequiredFlag & ~ContainerWriter::CREATE_VIDEO_TRACK;
}
// Storing
DebugOnly<bool> hasAudio = false;
DebugOnly<bool> hasVideo = false;
for (const RefPtr<TrackMetadataBase>& metadata : aMetadata) {
MOZ_ASSERT(metadata);
if (metadata->GetKind() == TrackMetadataBase::METADATA_VP8) {
MOZ_ASSERT(!hasVideo);
VP8Metadata* meta = static_cast<VP8Metadata*>(metadata.get());
mEbmlComposer->SetVideoConfig(meta->mWidth, meta->mHeight,
meta->mDisplayWidth, meta->mDisplayHeight);
hasVideo = true;
}
if (metadata->GetKind() == TrackMetadataBase::METADATA_VORBIS) {
MOZ_ASSERT(!hasAudio);
VorbisMetadata* meta = static_cast<VorbisMetadata*>(metadata.get());
mEbmlComposer->SetAudioConfig(meta->mSamplingFrequency, meta->mChannels);
mEbmlComposer->SetAudioCodecPrivateData(meta->mData);
hasAudio = true;
}
if (metadata->GetKind() == TrackMetadataBase::METADATA_OPUS) {
MOZ_ASSERT(!hasAudio);
OpusMetadata* meta = static_cast<OpusMetadata*>(metadata.get());
mEbmlComposer->SetAudioConfig(meta->mSamplingFrequency, meta->mChannels);
mEbmlComposer->SetAudioCodecPrivateData(meta->mIdHeader);
hasAudio = true;
}
if (aMetadata->GetKind() == TrackMetadataBase::METADATA_VORBIS) {
VorbisMetadata* meta = static_cast<VorbisMetadata*>(aMetadata);
MOZ_ASSERT(meta, "Cannot find vorbis encoder metadata");
mEbmlComposer->SetAudioConfig(meta->mSamplingFrequency, meta->mChannels);
mEbmlComposer->SetAudioCodecPrivateData(meta->mData);
mMetadataRequiredFlag =
mMetadataRequiredFlag & ~ContainerWriter::CREATE_AUDIO_TRACK;
}
if (aMetadata->GetKind() == TrackMetadataBase::METADATA_OPUS) {
OpusMetadata* meta = static_cast<OpusMetadata*>(aMetadata);
MOZ_ASSERT(meta, "Cannot find Opus encoder metadata");
mEbmlComposer->SetAudioConfig(meta->mSamplingFrequency, meta->mChannels);
mEbmlComposer->SetAudioCodecPrivateData(meta->mIdHeader);
mMetadataRequiredFlag =
mMetadataRequiredFlag & ~ContainerWriter::CREATE_AUDIO_TRACK;
}
if (!mMetadataRequiredFlag) {
mEbmlComposer->GenerateHeader();
}
mEbmlComposer->GenerateHeader();
return NS_OK;
}

View File

@ -41,28 +41,30 @@ class VP8Metadata : public TrackMetadataBase {
*/
class WebMWriter : public ContainerWriter {
public:
// Run in MediaRecorder thread
WebMWriter();
// aTrackTypes indicate this muxer should multiplex into Video only or A/V
// foramt. Run in MediaRecorder thread
explicit WebMWriter(uint32_t aTrackTypes);
virtual ~WebMWriter();
// WriteEncodedTrack inserts raw packets into WebM stream. Does not accept
// any flags: any specified will be ignored. Writing is finalized via
// flushing via GetContainerData().
nsresult WriteEncodedTrack(const nsTArray<RefPtr<EncodedFrame>>& aData,
// WriteEncodedTrack inserts raw packets into WebM stream.
nsresult WriteEncodedTrack(const EncodedFrameContainer& aData,
uint32_t aFlags = 0) override;
// GetContainerData outputs multiplexing data.
// aFlags indicates the muxer should enter into finished stage and flush out
// queue data.
nsresult GetContainerData(nsTArray<nsTArray<uint8_t>>* aOutputBufs,
nsresult GetContainerData(nsTArray<nsTArray<uint8_t> >* aOutputBufs,
uint32_t aFlags = 0) override;
// Assign metadata into muxer
nsresult SetMetadata(
const nsTArray<RefPtr<TrackMetadataBase>>& aMetadata) override;
nsresult SetMetadata(TrackMetadataBase* aMetadata) override;
private:
nsAutoPtr<EbmlComposer> mEbmlComposer;
// Indicate what kind of meta data needed in the writer.
// If this value become 0, it means writer can start to generate header.
uint8_t mMetadataRequiredFlag;
};
} // namespace mozilla

View File

@ -12,7 +12,7 @@ fuzzy-if(skiaContent,0-2,0-5) needs-focus == select-required-valid.html select-r
needs-focus == select-required-multiple-invalid.html select-required-multiple-ref.html
fuzzy-if(asyncPan&&!layersGPUAccelerated,0-84,0-77) fuzzy-if(skiaContent,0-1,0-1000) needs-focus == select-required-multiple-invalid-changed.html select-required-multiple-ref.html
needs-focus == select-required-multiple-valid.html select-required-multiple-ref.html
fuzzy-if(skiaContent&&!Android,0-2,0-10) needs-focus == select-disabled-fieldset-1.html select-fieldset-ref.html
fuzzy-if(skiaContent&&!Android,0-2,0-10) fuzzy-if(Android,0-9,0-1) needs-focus == select-disabled-fieldset-1.html select-fieldset-ref.html
fuzzy-if(skiaContent&&!Android,0-2,0-10) needs-focus == select-disabled-fieldset-2.html select-fieldset-ref.html
fuzzy-if(skiaContent,0-2,0-10) needs-focus == select-fieldset-legend.html select-fieldset-legend-ref.html
fuzzy-if(skiaContent,0-1,0-5) needs-focus == select-novalidate.html select-required-ref.html

View File

@ -19,6 +19,7 @@
<script defer="defer" type="module" src="chrome://global/content/certviewer/utils.js"></script>
<script defer="defer" type="module" src="chrome://global/content/certviewer/certDecoder.js"></script>
<script defer="defer" type="module" src="chrome://global/content/certviewer/certviewer.js"></script>
<script defer="defer" type="module" src="chrome://global/content/certviewer/components/dummy-info.js"></script>
<script defer="defer" type="module" src="chrome://global/content/certviewer/components/info-group.js"></script>
<script defer="defer" type="module" src="chrome://global/content/certviewer/components/info-item.js"></script>
<script defer="defer" type="module" src="chrome://global/content/certviewer/components/certificate-section.js"></script>

View File

@ -8,8 +8,10 @@
import { parse } from "./certDecoder.js";
import { pemToDER } from "./utils.js";
let gElements = {};
document.addEventListener("DOMContentLoaded", async e => {
gElements.certificateSection = document.querySelector("certificate-section");
let url = new URL(document.URL);
let certInfo = url.searchParams.getAll("cert");
if (certInfo.length === 0) {
@ -23,12 +25,11 @@ document.addEventListener("DOMContentLoaded", async e => {
export const updateSelectedItem = (() => {
let state;
return selectedItem => {
let certificateSection = document.querySelector("certificate-section");
if (selectedItem) {
if (state !== selectedItem) {
state = selectedItem;
certificateSection.updateCertificateSource(selectedItem);
certificateSection.updateSelectedTab(selectedItem);
gElements.certificateSection.updateCertificateSource(selectedItem);
gElements.certificateSection.updateSelectedTab(selectedItem);
}
}
return state;
@ -222,16 +223,17 @@ const adjustCertInformation = cert => {
Critical: cert.ext.scts.critical || false,
});
return {
certItems,
certItems.push({
tabName: cert.subject.cn,
};
});
return certItems;
};
const render = async (certs, error) => {
const render = async error => {
await customElements.whenDefined("certificate-section");
const CertificateSection = customElements.get("certificate-section");
document.querySelector("body").append(new CertificateSection(certs, error));
document.querySelector("body").append(new CertificateSection(error));
return Promise.resolve();
};
@ -258,9 +260,10 @@ const buildChain = async chain => {
return Promise.reject();
}
let adjustedCerts = certs.map(cert => adjustCertInformation(cert));
return render(adjustedCerts, false);
console.log(adjustedCerts);
return render(false);
})
.catch(err => {
render(null, true);
render(true);
});
};

View File

@ -3,13 +3,13 @@
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
import { updateSelectedItem } from "../certviewer.js";
import { certArray } from "./dummy-info.js";
import { InfoGroup } from "./info-group.js";
import { ErrorSection } from "./error-section.js";
class CertificateSection extends HTMLElement {
constructor(certs, error) {
constructor(error) {
super();
this.certs = certs;
this.error = error;
}
@ -42,9 +42,29 @@ class CertificateSection extends HTMLElement {
certificateTabs.appendChild(new ErrorSection());
return;
}
for (let i = 0; i < this.certs.length; i++) {
this.createInfoGroupsContainers(this.certs[i].certItems, i);
this.createTabSection(this.certs[i].tabName, i, certificateTabs);
this.createInfoGroupsContainers();
for (let i = 0; i < certArray.length; i++) {
let tab = document.createElement("button");
tab.textContent = "tab" + i;
tab.setAttribute("id", "tab" + i);
tab.setAttribute("aria-controls", "panel" + i);
tab.setAttribute("idnumber", i);
tab.setAttribute("role", "tab");
tab.classList.add("certificate-tab");
tab.classList.add("tab");
certificateTabs.appendChild(tab);
// If it is the first tab, allow it to be tabbable by the user.
// If it isn't the first tab, do not allow tab functionality,
// as arrow functionality is implemented in certviewer.js.
if (i === 0) {
tab.classList.add("selected");
tab.setAttribute("tabindex", 0);
} else {
tab.setAttribute("tabindex", -1);
}
this.infoGroupsContainers[0].classList.add("selected");
}
this.setAccessibilityEventListeners();
}
@ -92,43 +112,23 @@ class CertificateSection extends HTMLElement {
});
}
createInfoGroupsContainers(certArray, i) {
this.infoGroupsContainers[i] = document.createElement("div");
this.infoGroupsContainers[i].setAttribute("id", "panel" + i);
this.infoGroupsContainers[i].setAttribute("role", "tabpanel");
this.infoGroupsContainers[i].setAttribute("tabindex", 0);
this.infoGroupsContainers[i].setAttribute("aria-labelledby", "tab" + i);
if (i !== 0) {
this.infoGroupsContainers[i].setAttribute("hidden", true);
createInfoGroupsContainers() {
for (let i = 0; i < certArray.length; i++) {
this.infoGroupsContainers[i] = document.createElement("div");
this.infoGroupsContainers[i].setAttribute("id", "panel" + i);
this.infoGroupsContainers[i].setAttribute("role", "tabpanel");
this.infoGroupsContainers[i].setAttribute("tabindex", 0);
this.infoGroupsContainers[i].setAttribute("aria-labelledby", "tab" + i);
if (i !== 0) {
this.infoGroupsContainers[i].setAttribute("hidden", true);
}
this.infoGroupsContainers[i].classList.add("info-groups");
this.shadowRoot.appendChild(this.infoGroupsContainers[i]);
let arrayItem = certArray[i];
for (let j = 0; j < arrayItem.length; j++) {
this.infoGroupsContainers[i].appendChild(new InfoGroup(arrayItem[j]));
}
}
this.infoGroupsContainers[i].classList.add("info-groups");
this.shadowRoot.appendChild(this.infoGroupsContainers[i]);
for (let j = 0; j < certArray.length; j++) {
this.infoGroupsContainers[i].appendChild(new InfoGroup(certArray[j]));
}
}
createTabSection(tabName, i, certificateTabs) {
let tab = document.createElement("button");
tab.textContent = tabName;
tab.setAttribute("id", "tab" + i);
tab.setAttribute("aria-controls", "panel" + i);
tab.setAttribute("idnumber", i);
tab.setAttribute("role", "tab");
tab.classList.add("certificate-tab");
tab.classList.add("tab");
certificateTabs.appendChild(tab);
// If it is the first tab, allow it to be tabbable by the user.
// If it isn't the first tab, do not allow tab functionality,
// as arrow functionality is implemented in certviewer.js.
if (i === 0) {
tab.classList.add("selected");
tab.setAttribute("tabindex", 0);
} else {
tab.setAttribute("tabindex", -1);
}
this.infoGroupsContainers[0].classList.add("selected");
}
updateSelectedTab(index) {

View File

@ -0,0 +1,182 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
export const certArray = [
[
{
sectionTitle: "Subject Name",
sectionItems: [
{
label: "Common Name",
info: "developer.mozilla.org",
},
],
},
{
sectionTitle: "Issuer Name",
sectionItems: [
{
label: "Country",
info: "US",
},
{
label: "Organization",
info: "Amazon",
},
{
label: "Organizational Unit",
info: "Server CA 1B",
},
{
label: "Common Name",
info: "Amazon",
},
],
},
{
sectionTitle: "Validity",
sectionItems: [
{
label: "Not Before",
info: "5/14/2019, 9:00:00 PM (Atlantic Daylight Time)",
},
{
label: "Not After",
info: "6/15/2020, 9:00:00 AM (Atlantic Daylight Time)",
},
],
},
{
sectionTitle: "Subject Alt Names",
sectionItems: [
{
label: "DNS Name",
info: "developer.mozilla.org",
},
{
label: "DNS Name",
info: "beta.developer.mozilla.org",
},
{
label: "DNS Name",
info: "developer-prod.mdn.mozit.cloud",
},
{
label: "DNS Name",
info: "wiki.developer.mozilla.org",
},
],
},
{
sectionTitle: "Public Key Info",
sectionItems: [
{
label: "Algorithm",
info: "RSA",
},
{
label: "Key Size",
info: "2048 bits",
},
{
label: "Exponent",
info: "65537",
},
{
label: "Modulus",
info:
"8B:FF:8A:9E:9E:2B:11:68:78:02:95:57:B6:84:F7:F3:32:46:BE:06:41:29:5B:AF:13:D7:93:28:4A:FC:8D:33:C9:07:BC:C5:CE:45:F5:60:42:A3:65:07:19:69:B8:67:97:9C:DB:B3:A7:67:D6:7A:57:BA:82:4E:63:83:33:B9:64:A1:56:1C:8A:EF:9F:7B:74:08:3F:D0:9B:E5:39:80:1C:C3:5D:4D:1B:4F:4A:23:BE:B5:BC:DD:18:5E:1D:CE:27:C8:7B:F7:5E:E6:9C:C3:E7:69:50:45:D1:BE:01:71:A3:61:19:6D:7F:B6:6E:4B:C0:E5:11:B0:0D:01:D3:5C:66:B1:1D:61:7D:BB:43:E4:40:63:D8:C5:82:18:6B:28:24:15:39:6A:82:4F:60:3F:66:6E:23:86:2A:84:E1:34:70:AE:06:2D:92:A7:84:80:AD:6F:6F:24:52:FA:7B:E8:C2:CD:E2:55:2E:AE:27:07:04:D4:B6:F1:EC:80:2D:D1:B2:E1:74:BE:ED:D4:04:8C:D8:06:44:CC:F9:6C:4E:64:68:35:38:48:59:F7:45:49:BF:34:EE:DD:55:C6:1A:EB:61:1F:4A:FA:30:3F:73:8B:36:A8:90:6E:CB:2E:58:8F:9C:78:0A:AE:4E:45:A0:30:61:5A:6A:F8:A3:32:92:E3",
},
],
},
],
[
{
sectionTitle: "Subject Name",
sectionItems: [
{
label: "Common Name",
info: "developer.mozilla.org 2",
},
],
},
{
sectionTitle: "Issuer Name",
sectionItems: [
{
label: "Country",
info: "US 2",
},
{
label: "Organization",
info: "Amazon",
},
{
label: "Organizational Unit",
info: "Server CA 1B",
},
{
label: "Common Name",
info: "Amazon",
},
],
},
{
sectionTitle: "Validity",
sectionItems: [
{
label: "Not Before",
info: "5/14/2019, 9:00:00 PM (Atlantic Daylight Time) 2",
},
{
label: "Not After",
info: "6/15/2020, 9:00:00 AM (Atlantic Daylight Time)",
},
],
},
{
sectionTitle: "Subject Alt Names",
sectionItems: [
{
label: "DNS Name",
info: "developer.mozilla.org 2",
},
{
label: "DNS Name",
info: "beta.developer.mozilla.org",
},
{
label: "DNS Name",
info: "developer-prod.mdn.mozit.cloud",
},
{
label: "DNS Name",
info: "wiki.developer.mozilla.org",
},
],
},
{
sectionTitle: "Public Key Info",
sectionItems: [
{
label: "Algorithm",
info: "RSA 2",
},
{
label: "Key Size",
info: "2048 bits",
},
{
label: "Exponent",
info: "65537",
},
{
label: "Modulus",
info:
"8B:FF:8A:9E:9E:2B:11:68:78:02:95:57:B6:84:F7:F3:32:46:BE:06:41:29:5B:AF:13:D7:93:28:4A:FC:8D:33:C9:07:BC:C5:CE:45:F5:60:42:A3:65:07:19:69:B8:67:97:9C:DB:B3:A7:67:D6:7A:57:BA:82:4E:63:83:33:B9:64:A1:56:1C:8A:EF:9F:7B:74:08:3F:D0:9B:E5:39:80:1C:C3:5D:4D:1B:4F:4A:23:BE:B5:BC:DD:18:5E:1D:CE:27:C8:7B:F7:5E:E6:9C:C3:E7:69:50:45:D1:BE:01:71:A3:61:19:6D:7F:B6:6E:4B:C0:E5:11:B0:0D:01:D3:5C:66:B1:1D:61:7D:BB:43:E4:40:63:D8:C5:82:18:6B:28:24:15:39:6A:82:4F:60:3F:66:6E:23:86:2A:84:E1:34:70:AE:06:2D:92:A7:84:80:AD:6F:6F:24:52:FA:7B:E8:C2:CD:E2:55:2E:AE:27:07:04:D4:B6:F1:EC:80:2D:D1:B2:E1:74:BE:ED:D4:04:8C:D8:06:44:CC:F9:6C:4E:64:68:35:38:48:59:F7:45:49:BF:34:EE:DD:55:C6:1A:EB:61:1F:4A:FA:30:3F:73:8B:36:A8:90:6E:CB:2E:58:8F:9C:78:0A:AE:4E:45:A0:30:61:5A:6A:F8:A3:32:92:E3",
},
],
},
],
];

View File

@ -10,6 +10,7 @@ toolkit.jar:
content/global/certviewer/components/certificate-section.css (content/components/certificate-section.css)
content/global/certviewer/components/error-section.js (content/components/error-section.js)
content/global/certviewer/components/error-section.css (content/components/error-section.css)
content/global/certviewer/components/dummy-info.js (content/components/dummy-info.js)
content/global/certviewer/components/info-group.js (content/components/info-group.js)
content/global/certviewer/components/info-group.css (content/components/info-group.css)
content/global/certviewer/components/info-item.js (content/components/info-item.js)

View File

@ -1,245 +0,0 @@
"use strict";
const adjustedCerts = {
certItems: [
{
sectionTitle: "Subject Name",
sectionItems: [
{ label: "Business Category", info: "Private Organization" },
{ label: "Inc. Country", info: "US" },
{ label: "Inc. State / Province", info: "Delaware" },
{ label: "Serial Number", info: "5157550" },
{ label: "Country", info: "US" },
{ label: "State / Province", info: "California" },
{ label: "Locality", info: "San Francisco" },
{ label: "Organization", info: "GitHub, Inc." },
{ label: "Common Name", info: "github.com" },
],
Critical: false,
},
{
sectionTitle: "Issuer Name",
sectionItems: [
{ label: "Country", info: "US" },
{ label: "Organization", info: "DigiCert Inc" },
{ label: "Organizational Unit", info: "www.digicert.com" },
{
label: "Common Name",
info: "DigiCert SHA2 Extended Validation Server CA",
},
],
Critical: false,
},
{
sectionTitle: "Validity",
sectionItems: [
{
label: "Not Before",
info: "5/7/2018, 9:00:00 PM (Brasilia Standard Time)",
},
{ label: "Not Before UTC", info: "Tue, 08 May 2018 00:00:00 GMT" },
{
label: "Not After",
info: "6/3/2020, 9:00:00 AM (Brasilia Standard Time)",
},
{ label: "Not After UTC", info: "Wed, 03 Jun 2020 12:00:00 GMT" },
],
Critical: false,
},
{
sectionTitle: "Subject Alt Names",
sectionItems: [
{ label: "DNS Name", info: "github.com" },
{ label: "DNS Name", info: "www.github.com" },
],
Critical: false,
},
{
sectionTitle: "Public Key Info",
sectionItems: [
{ label: "Algorithm", info: "RSA" },
{ label: "Key size", info: 2048 },
{ label: "Curve" },
{ label: "Public Value" },
{ label: "Exponent", info: 65537 },
{
label: "Modulus",
info:
"C6:3C:AA:F2:3C:97:0C:3A:C1:4F:28:AD:72:70:7D:D3:CE:B9:B5:60:73:A4:74:9B:8A:77:46:FD:7A:98:42:4C:C5:30:19:57:9A:A9:33:0B:E1:5D:4D:10:58:CA:77:99:C3:93:F3:F9:75:90:BC:BF:BB:E0:95:BA:2E:C5:8D:73:61:05:D3:10:84:A8:B3:89:B8:2F:73:8C:F0:2A:6E:BE:EE:AE:83:4B:82:11:B1:61:FD:77:61:DA:9B:1B:9A:23:FF:8C:7E:A2:01:06:DD:D1:7F:53:96:08:C1:5A:FA:E7:C0:CA:C8:44:8C:57:A7:A8:61:5F:66:0D:57:D3:B8:96:AC:B6:4A:9C:C1:EA:E8:FB:96:40:29:F6:15:30:B5:04:B0:CC:05:B6:84:C3:24:59:95:7F:A2:65:90:E5:B0:B3:1A:75:59:C4:3F:31:14:0A:D5:CC:AA:3A:85:05:52:06:32:96:07:61:DF:27:82:0C:F7:85:DB:60:31:F0:09:50:C5:B7:1A:23:E1:B0:7D:02:F5:14:1E:C9:CB:E8:7E:2A:33:04:F6:51:3F:52:98:15:E9:0B:76:47:5C:4D:4A:6B:C5:08:15:AE:F8:D1:57:E9:EA:70:14:FF:C9:45:B9:0C:7C:BC:F4:6D:E6:05:52:F9:8C:80:BB:70:56:91:0F:4B",
},
],
Critical: false,
},
{
sectionTitle: "Miscellaneous",
sectionItems: [
{
label: "Serial Number",
info: "0A:06:30:42:7F:5B:BC:ED:69:57:39:65:93:B6:45:1F",
},
{ label: "Signature Algorithm", info: "SHA-256 with RSA Encryption" },
{ label: "Version", info: "3" },
{
label: "Download",
info:
"-----BEGIN%20CERTIFICATE-----%0D%0AMIIHQjCCBiqgAwIBAgIQCgYwQn9bvO1pVzllk7ZFHzANBgkqhkiG9w0BAQsFADB1%0D%0AMQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3%0D%0Ad3cuZGlnaWNlcnQuY29tMTQwMgYDVQQDEytEaWdpQ2VydCBTSEEyIEV4dGVuZGVk%0D%0AIFZhbGlkYXRpb24gU2VydmVyIENBMB4XDTE4MDUwODAwMDAwMFoXDTIwMDYwMzEy%0D%0AMDAwMFowgccxHTAbBgNVBA8MFFByaXZhdGUgT3JnYW5pemF0aW9uMRMwEQYLKwYB%0D%0ABAGCNzwCAQMTAlVTMRkwFwYLKwYBBAGCNzwCAQITCERlbGF3YXJlMRAwDgYDVQQF%0D%0AEwc1MTU3NTUwMQswCQYDVQQGEwJVUzETMBEGA1UECBMKQ2FsaWZvcm5pYTEWMBQG%0D%0AA1UEBxMNU2FuIEZyYW5jaXNjbzEVMBMGA1UEChMMR2l0SHViLCBJbmMuMRMwEQYD%0D%0AVQQDEwpnaXRodWIuY29tMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA%0D%0Axjyq8jyXDDrBTyitcnB90865tWBzpHSbindG/XqYQkzFMBlXmqkzC+FdTRBYyneZ%0D%0Aw5Pz+XWQvL+74JW6LsWNc2EF0xCEqLOJuC9zjPAqbr7uroNLghGxYf13YdqbG5oj%0D%0A/4x+ogEG3dF/U5YIwVr658DKyESMV6eoYV9mDVfTuJastkqcwero+5ZAKfYVMLUE%0D%0AsMwFtoTDJFmVf6JlkOWwsxp1WcQ/MRQK1cyqOoUFUgYylgdh3yeCDPeF22Ax8AlQ%0D%0AxbcaI+GwfQL1FB7Jy+h+KjME9lE/UpgV6Qt2R1xNSmvFCBWu+NFX6epwFP/JRbkM%0D%0AfLz0beYFUvmMgLtwVpEPSwIDAQABo4IDeTCCA3UwHwYDVR0jBBgwFoAUPdNQpdag%0D%0Are7zSmAKZdMh1Pj41g8wHQYDVR0OBBYEFMnCU2FmnV+rJfQmzQ84mqhJ6kipMCUG%0D%0AA1UdEQQeMByCCmdpdGh1Yi5jb22CDnd3dy5naXRodWIuY29tMA4GA1UdDwEB/wQE%0D%0AAwIFoDAdBgNVHSUEFjAUBggrBgEFBQcDAQYIKwYBBQUHAwIwdQYDVR0fBG4wbDA0%0D%0AoDKgMIYuaHR0cDovL2NybDMuZGlnaWNlcnQuY29tL3NoYTItZXYtc2VydmVyLWcy%0D%0ALmNybDA0oDKgMIYuaHR0cDovL2NybDQuZGlnaWNlcnQuY29tL3NoYTItZXYtc2Vy%0D%0AdmVyLWcyLmNybDBLBgNVHSAERDBCMDcGCWCGSAGG/WwCATAqMCgGCCsGAQUFBwIB%0D%0AFhxodHRwczovL3d3dy5kaWdpY2VydC5jb20vQ1BTMAcGBWeBDAEBMIGIBggrBgEF%0D%0ABQcBAQR8MHowJAYIKwYBBQUHMAGGGGh0dHA6Ly9vY3NwLmRpZ2ljZXJ0LmNvbTBS%0D%0ABggrBgEFBQcwAoZGaHR0cDovL2NhY2VydHMuZGlnaWNlcnQuY29tL0RpZ2lDZXJ0%0D%0AU0hBMkV4dGVuZGVkVmFsaWRhdGlvblNlcnZlckNBLmNydDAMBgNVHRMBAf8EAjAA%0D%0AMIIBfgYKKwYBBAHWeQIEAgSCAW4EggFqAWgAdgCkuQmQtBhYFIe7E6LMZ3AKPDWY%0D%0ABPkb37jjd80OyA3cEAAAAWNBYm0KAAAEAwBHMEUCIQDRZp38cTWsWH2GdBpe/uPT%0D%0AWnsu/m4BEC2+dIcvSykZYgIgCP5gGv6yzaazxBK2NwGdmmyuEFNSg2pARbMJlUFg%0D%0AU5UA
dgBWFAaaL9fC7NP14b1Esj7HRna5vJkRXMDvlJhV1onQ3QAAAWNBYm0tAAAE%0D%0AAwBHMEUCIQCi7omUvYLm0b2LobtEeRAYnlIo7n6JxbYdrtYdmPUWJQIgVgw1AZ51%0D%0AvK9ENinBg22FPxb82TvNDO05T17hxXRC2IYAdgC72d+8H4pxtZOUI5eqkntHOFeV%0D%0ACqtS6BqQlmQ2jh7RhQAAAWNBYm3fAAAEAwBHMEUCIQChzdTKUU2N+XcqcK0OJYrN%0D%0A8EYynloVxho4yPk6Dq3EPgIgdNH5u8rC3UcslQV4B9o0a0w204omDREGKTVuEpxG%0D%0AeOQwDQYJKoZIhvcNAQELBQADggEBAHAPWpanWOW/ip2oJ5grAH8mqQfaunuCVE+v%0D%0Aac+88lkDK/LVdFgl2B6kIHZiYClzKtfczG93hWvKbST4NRNHP9LiaQqdNC17e5vN%0D%0AHnXVUGw+yxyjMLGqkgepOnZ2Rb14kcTOGp4i5AuJuuaMwXmCo7jUwPwfLe1NUlVB%0D%0AKqg6LK0Hcq4K0sZnxE8HFxiZ92WpV2AVWjRMEc/2z2shNoDvxvFUYyY1Oe67xINk%0D%0AmyQKc+ygSBZzyLnXSFVWmHr3u5dcaaQGGAR42v6Ydr4iL38Hd4dOiBma+FXsXBIq%0D%0AWUjbST4VXmdaol7uzFMojA4zkxQDZAvF5XgJlAFadfySna/teik=%0D%0A-----END%20CERTIFICATE-----%0D%0A",
},
],
Critical: false,
},
{
sectionTitle: "Fingerprints",
sectionItems: [
{
label: "SHA-256",
info:
"31:11:50:0C:4A:66:01:2C:DA:E3:33:EC:3F:CA:1C:9D:DE:45:C9:54:44:0E:7E:E4:13:71:6B:FF:36:63:C0:74",
},
{
label: "SHA-1",
info: "CA:06:F5:6B:25:8B:7A:0D:4F:2B:05:47:09:39:47:86:51:15:19:84",
},
],
Critical: false,
},
{
sectionTitle: "Basic Constraints",
sectionItems: [{ label: "Certificate Authority" }],
Critical: true,
},
{
sectionTitle: "Key Usages",
sectionItems: [
{ label: "Purposes", info: ["Digital Signature", "Key Encipherment"] },
],
Critical: true,
},
{
sectionTitle: "Extended Key Usages",
sectionItems: [
{
label: "Purposes",
info: ["Server Authentication", "Client Authentication"],
},
],
Critical: false,
},
{
sectionTitle: "OCSP Stapling",
sectionItems: [{ label: "Required", info: false }],
Critical: false,
},
{
sectionTitle: "Subject Key ID",
sectionItems: [
{
label: "Key ID",
info: "C9:C2:53:61:66:9D:5F:AB:25:F4:26:CD:0F:38:9A:A8:49:EA:48:A9",
},
],
Critical: false,
},
{
sectionTitle: "Authority Key ID",
sectionItems: [
{
label: "Key ID",
info: "3D:D3:50:A5:D6:A0:AD:EE:F3:4A:60:0A:65:D3:21:D4:F8:F8:D6:0F",
},
],
Critical: false,
},
{
sectionTitle: "CRL Endpoints",
sectionItems: [
{
label: "Distribution Point",
info: "http://crl3.digicert.com/sha2-ev-server-g2.crl",
},
{
label: "Distribution Point",
info: "http://crl4.digicert.com/sha2-ev-server-g2.crl",
},
],
Critical: false,
},
{
sectionTitle: "Authority Info (AIA)",
sectionItems: [
{ label: "Location", info: "http://ocsp.digicert.com" },
{ label: "Method", info: "Online Certificate Status Protocol (OCSP)" },
{
label: "Location",
info:
"http://cacerts.digicert.com/DigiCertSHA2ExtendedValidationServerCA.crt",
},
{ label: "Method", info: "CA Issuers" },
],
Critical: false,
},
{
sectionTitle: "Certificate Policies",
sectionItems: [
{
label: "Policy",
info: "ANSI Organizational Identifier ( 2.16.840 )",
},
{ label: "Value", info: "2.16.840.1.114412.2.1" },
{
label: "Qualifier",
info: "Practices Statement ( 1.3.6.1.5.5.7.2.1 )",
},
{ label: "Value", info: "https://www.digicert.com/CPS" },
{ label: "Policy", info: "Certificate Type ( 2.23.140.1.1 )" },
{ label: "Value", info: "Extended Validation" },
],
Critical: false,
},
{
sectionTitle: "Embedded SCTs",
sectionItems: [
{
label: "logId",
info:
"A4:B9:09:90:B4:18:58:14:87:BB:13:A2:CC:67:70:0A:3C:35:98:04:F9:1B:DF:B8:E3:77:CD:0E:C8:0D:DC:10",
},
{ label: "name", info: "Google “Pilot”" },
{ label: "signatureAlgorithm", info: "SHA-256 ECDSA" },
{
label: "timestamp",
info: "5/8/2018, 5:12:39 PM (Brasilia Standard Time)",
},
{ label: "timestampUTC", info: "Tue, 08 May 2018 20:12:39 GMT" },
{ label: "version", info: 1 },
{
label: "logId",
info:
"56:14:06:9A:2F:D7:C2:EC:D3:F5:E1:BD:44:B2:3E:C7:46:76:B9:BC:99:11:5C:C0:EF:94:98:55:D6:89:D0:DD",
},
{ label: "name", info: "DigiCert Server" },
{ label: "signatureAlgorithm", info: "SHA-256 ECDSA" },
{
label: "timestamp",
info: "5/8/2018, 5:12:39 PM (Brasilia Standard Time)",
},
{ label: "timestampUTC", info: "Tue, 08 May 2018 20:12:39 GMT" },
{ label: "version", info: 1 },
{
label: "logId",
info:
"BB:D9:DF:BC:1F:8A:71:B5:93:94:23:97:AA:92:7B:47:38:57:95:0A:AB:52:E8:1A:90:96:64:36:8E:1E:D1:85",
},
{ label: "name", info: "Google “Skydiver”" },
{ label: "signatureAlgorithm", info: "SHA-256 ECDSA" },
{
label: "timestamp",
info: "5/8/2018, 5:12:39 PM (Brasilia Standard Time)",
},
{ label: "timestampUTC", info: "Tue, 08 May 2018 20:12:39 GMT" },
{ label: "version", info: 1 },
],
Critical: false,
},
],
tabName: "github.com",
};

View File

@ -1,5 +1 @@
[DEFAULT]
support-files =
adjustedCerts.js
[browser_openTabAndSendCertInfo.js]
[browser_renderCertToUI.js]

View File

@ -1,124 +0,0 @@
/* Any copyright is dedicated to the Public Domain.
* http://creativecommons.org/publicdomain/zero/1.0/ */
"use strict";
const url =
"about:certificate?cert=MIIHQjCCBiqgAwIBAgIQCgYwQn9bvO1pVzllk7ZFHzANBgkqhkiG9w0BAQsFADB1MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNlcnQuY29tMTQwMgYDVQQDEytEaWdpQ2VydCBTSEEyIEV4dGVuZGVkIFZhbGlkYXRpb24gU2VydmVyIENBMB4XDTE4MDUwODAwMDAwMFoXDTIwMDYwMzEyMDAwMFowgccxHTAbBgNVBA8MFFByaXZhdGUgT3JnYW5pemF0aW9uMRMwEQYLKwYBBAGCNzwCAQMTAlVTMRkwFwYLKwYBBAGCNzwCAQITCERlbGF3YXJlMRAwDgYDVQQFEwc1MTU3NTUwMQswCQYDVQQGEwJVUzETMBEGA1UECBMKQ2FsaWZvcm5pYTEWMBQGA1UEBxMNU2FuIEZyYW5jaXNjbzEVMBMGA1UEChMMR2l0SHViLCBJbmMuMRMwEQYDVQQDEwpnaXRodWIuY29tMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAxjyq8jyXDDrBTyitcnB90865tWBzpHSbindG%2FXqYQkzFMBlXmqkzC%2BFdTRBYyneZw5Pz%2BXWQvL%2B74JW6LsWNc2EF0xCEqLOJuC9zjPAqbr7uroNLghGxYf13YdqbG5oj%2F4x%2BogEG3dF%2FU5YIwVr658DKyESMV6eoYV9mDVfTuJastkqcwero%2B5ZAKfYVMLUEsMwFtoTDJFmVf6JlkOWwsxp1WcQ%2FMRQK1cyqOoUFUgYylgdh3yeCDPeF22Ax8AlQxbcaI%2BGwfQL1FB7Jy%2Bh%2BKjME9lE%2FUpgV6Qt2R1xNSmvFCBWu%2BNFX6epwFP%2FJRbkMfLz0beYFUvmMgLtwVpEPSwIDAQABo4IDeTCCA3UwHwYDVR0jBBgwFoAUPdNQpdagre7zSmAKZdMh1Pj41g8wHQYDVR0OBBYEFMnCU2FmnV%2BrJfQmzQ84mqhJ6kipMCUGA1UdEQQeMByCCmdpdGh1Yi5jb22CDnd3dy5naXRodWIuY29tMA4GA1UdDwEB%2FwQEAwIFoDAdBgNVHSUEFjAUBggrBgEFBQcDAQYIKwYBBQUHAwIwdQYDVR0fBG4wbDA0oDKgMIYuaHR0cDovL2NybDMuZGlnaWNlcnQuY29tL3NoYTItZXYtc2VydmVyLWcyLmNybDA0oDKgMIYuaHR0cDovL2NybDQuZGlnaWNlcnQuY29tL3NoYTItZXYtc2VydmVyLWcyLmNybDBLBgNVHSAERDBCMDcGCWCGSAGG%2FWwCATAqMCgGCCsGAQUFBwIBFhxodHRwczovL3d3dy5kaWdpY2VydC5jb20vQ1BTMAcGBWeBDAEBMIGIBggrBgEFBQcBAQR8MHowJAYIKwYBBQUHMAGGGGh0dHA6Ly9vY3NwLmRpZ2ljZXJ0LmNvbTBSBggrBgEFBQcwAoZGaHR0cDovL2NhY2VydHMuZGlnaWNlcnQuY29tL0RpZ2lDZXJ0U0hBMkV4dGVuZGVkVmFsaWRhdGlvblNlcnZlckNBLmNydDAMBgNVHRMBAf8EAjAAMIIBfgYKKwYBBAHWeQIEAgSCAW4EggFqAWgAdgCkuQmQtBhYFIe7E6LMZ3AKPDWYBPkb37jjd80OyA3cEAAAAWNBYm0KAAAEAwBHMEUCIQDRZp38cTWsWH2GdBpe%2FuPTWnsu%2Fm4BEC2%2BdIcvSykZYgIgCP5gGv6yzaazxBK2NwGdmmyuEFNSg2pARbMJlUFgU5UAdgBWFAaaL9fC7NP14b1Esj7HRna5vJkRXMDvlJhV1onQ3QAAAWNBYm0tAAAEAwBHMEUCIQCi7omUvYLm0b2LobtEeRAYnlIo7n6JxbYdrtYdmPUWJQIgVgw1AZ51vK9ENinBg22FPx
b82TvNDO05T17hxXRC2IYAdgC72d%2B8H4pxtZOUI5eqkntHOFeVCqtS6BqQlmQ2jh7RhQAAAWNBYm3fAAAEAwBHMEUCIQChzdTKUU2N%2BXcqcK0OJYrN8EYynloVxho4yPk6Dq3EPgIgdNH5u8rC3UcslQV4B9o0a0w204omDREGKTVuEpxGeOQwDQYJKoZIhvcNAQELBQADggEBAHAPWpanWOW%2Fip2oJ5grAH8mqQfaunuCVE%2Bvac%2B88lkDK%2FLVdFgl2B6kIHZiYClzKtfczG93hWvKbST4NRNHP9LiaQqdNC17e5vNHnXVUGw%2ByxyjMLGqkgepOnZ2Rb14kcTOGp4i5AuJuuaMwXmCo7jUwPwfLe1NUlVBKqg6LK0Hcq4K0sZnxE8HFxiZ92WpV2AVWjRMEc%2F2z2shNoDvxvFUYyY1Oe67xINkmyQKc%2BygSBZzyLnXSFVWmHr3u5dcaaQGGAR42v6Ydr4iL38Hd4dOiBma%2BFXsXBIqWUjbST4VXmdaol7uzFMojA4zkxQDZAvF5XgJlAFadfySna%2Fteik%3D";
/* import-globals-from ./adjustedCerts.js */
Services.scriptloader.loadSubScript(
"chrome://mochitests/content/browser/toolkit/components/certviewer/tests/browser/adjustedCerts.js",
this
);
add_task(async function test() {
Assert.ok(adjustedCerts, "adjustedCerts found");
let tabName = adjustedCerts.tabName;
let certItems = adjustedCerts.certItems;
await BrowserTestUtils.withNewTab(url, async function(browser) {
await ContentTask.spawn(browser, [certItems, tabName], async function([
adjustedCerts,
expectedTabName,
]) {
let certificateSection = await ContentTaskUtils.waitForCondition(() => {
return content.document.querySelector("certificate-section");
}, "Certificate section found");
let infoGroups = certificateSection.shadowRoot.querySelectorAll(
"info-group"
);
Assert.ok(infoGroups, "infoGroups found");
Assert.equal(
infoGroups.length,
adjustedCerts.length,
"infoGroups must have the same length of adjustedCerts"
);
let tabName = certificateSection.shadowRoot.querySelector(
".certificate-tabs"
).children[0].innerText;
Assert.equal(tabName, expectedTabName, "Tab name should be the same");
function getElementByAttribute(source, property, target) {
for (let elem of source) {
if (elem.hasOwnProperty(property) && elem[property] === target) {
return elem;
}
}
return null;
}
for (let infoGroup of infoGroups) {
let sectionTitle = infoGroup.shadowRoot.querySelector(
".info-group-title"
).innerText;
let adjustedCertsElem = getElementByAttribute(
adjustedCerts,
"sectionTitle",
sectionTitle
);
Assert.ok(adjustedCertsElem, "The element exists in adjustedCerts");
let infoItems = infoGroup.shadowRoot.querySelectorAll("info-item");
Assert.equal(
infoItems.length,
adjustedCertsElem.sectionItems.length,
"sectionItems must be the same length"
);
let i = 0;
for (let infoItem of infoItems) {
let infoItemLabel = infoItem.shadowRoot
.querySelector("label")
.getAttribute("data-l10n-id");
let infoItemInfo = infoItem.shadowRoot.children[2].innerText;
let adjustedCertsElemLabel = adjustedCertsElem.sectionItems[i].label;
if (adjustedCertsElemLabel == null) {
adjustedCertsElemLabel = "";
}
adjustedCertsElemLabel = adjustedCertsElemLabel
.replace(/\s+/g, "-")
.toLowerCase();
let adjustedCertsElemInfo = adjustedCertsElem.sectionItems[i].info;
if (adjustedCertsElemInfo == null) {
adjustedCertsElemInfo = "";
}
if (typeof adjustedCertsElemInfo !== "string") {
// there is a case where we have a boolean
adjustedCertsElemInfo = adjustedCertsElemInfo.toString();
}
Assert.ok(
infoItemLabel.includes(adjustedCertsElemLabel),
"data-l10n-id must contain the original label"
);
if (
// we are skiping this cases because we are going to compare them
// with their UTC, e.g: timestampUTC
!(
adjustedCertsElemLabel === "timestamp" ||
adjustedCertsElemLabel === "not-after" ||
adjustedCertsElemLabel === "not-before"
)
) {
Assert.equal(
infoItemInfo,
adjustedCertsElemInfo,
"Info must be equal"
);
}
i++;
}
}
});
});
});