Bug 1712368 - update mp4parse-rust to dd96773. r=kinetik

Differential Revision: https://phabricator.services.mozilla.com/D115718
This commit is contained in:
Jon Bauman 2021-05-22 16:28:08 +00:00
parent d289e4d878
commit 71b1259afd
37 changed files with 1319 additions and 718 deletions

View File

@ -25,7 +25,7 @@ tag = "v0.4.24"
[source."https://github.com/mozilla/mp4parse-rust"]
git = "https://github.com/mozilla/mp4parse-rust"
replace-with = "vendored-sources"
rev = "94c3b1f368c82aefcbf51967f6aa296a9ccceb69"
rev = "dd967732ac836cc1ed5f1a7259d912c5d13e009b"
[source."https://github.com/mozilla/cubeb-pulse-rs"]
git = "https://github.com/mozilla/cubeb-pulse-rs"

4
Cargo.lock generated
View File

@ -3257,7 +3257,7 @@ dependencies = [
[[package]]
name = "mp4parse"
version = "0.11.5"
source = "git+https://github.com/mozilla/mp4parse-rust?rev=94c3b1f368c82aefcbf51967f6aa296a9ccceb69#94c3b1f368c82aefcbf51967f6aa296a9ccceb69"
source = "git+https://github.com/mozilla/mp4parse-rust?rev=dd967732ac836cc1ed5f1a7259d912c5d13e009b#dd967732ac836cc1ed5f1a7259d912c5d13e009b"
dependencies = [
"bitreader",
"byteorder",
@ -3276,7 +3276,7 @@ version = "0.1.0"
[[package]]
name = "mp4parse_capi"
version = "0.11.5"
source = "git+https://github.com/mozilla/mp4parse-rust?rev=94c3b1f368c82aefcbf51967f6aa296a9ccceb69#94c3b1f368c82aefcbf51967f6aa296a9ccceb69"
source = "git+https://github.com/mozilla/mp4parse-rust?rev=dd967732ac836cc1ed5f1a7259d912c5d13e009b#dd967732ac836cc1ed5f1a7259d912c5d13e009b"
dependencies = [
"byteorder",
"fallible_collections",

View File

@ -69,7 +69,7 @@ class AVIFParser {
mp4parse_avif_get_image(mParser.get(), mAvifImage.ptr());
MOZ_LOG(sAVIFLog, LogLevel::Debug,
("[this=%p] mp4parse_avif_get_image -> %d; primary_item length: "
"%u, alpha_item length: %u",
"%zu, alpha_item length: %zu",
this, status, mAvifImage->primary_item.length,
mAvifImage->alpha_item.length));
if (status != MP4PARSE_STATUS_OK) {
@ -83,14 +83,20 @@ class AVIFParser {
private:
explicit AVIFParser(const Mp4parseIo* aIo) : mIo(aIo) {
MOZ_ASSERT(mIo);
MOZ_LOG(sAVIFLog, LogLevel::Debug, ("Create AVIFParser=%p", this));
MOZ_LOG(sAVIFLog, LogLevel::Debug,
("Create AVIFParser=%p, image.avif.compliance_strictness: %d", this,
StaticPrefs::image_avif_compliance_strictness()));
}
bool Init() {
MOZ_ASSERT(!mParser);
Mp4parseAvifParser* parser = nullptr;
Mp4parseStatus status = mp4parse_avif_new(mIo, &parser);
Mp4parseStatus status =
mp4parse_avif_new(mIo,
static_cast<enum Mp4parseStrictness>(
StaticPrefs::image_avif_compliance_strictness()),
&parser);
MOZ_LOG(sAVIFLog, LogLevel::Debug,
("[this=%p] mp4parse_avif_new status: %d", this, status));
if (status != MP4PARSE_STATUS_OK) {

View File

@ -5507,6 +5507,16 @@
#endif
mirror: always
# How strict we are in accepting/rejecting AVIF inputs according to whether they
# conform to the specification
# 0 = Permissive: accept whatever we can simply, unambiguously interpret
# 1 = Normal: reject violations of "shall" specification directives
# 2 = Strict: reject violations of "should" specification directives
- name: image.avif.compliance_strictness
type: RelaxedAtomicInt32
value: 1
mirror: always
# Whether we use dav1d (true) or libaom (false) to decode AVIF image
- name: image.avif.use-dav1d
type: RelaxedAtomicBool

0
third_party/rust/ashmem/Cargo.toml vendored Normal file → Executable file
View File

0
third_party/rust/ashmem/src/lib.rs vendored Normal file → Executable file
View File

View File

@ -1 +1 @@
{"files":{"Cargo.toml":"20d60a51a713582d285ab994d13014f101450a359f5ada589b82ea97615ae2e8","benches/avif_benchmark.rs":"e4bdd69c7b434448ef7080bbf488b2f64c2c88121fe1c1f7e9d9c8def3d648ae","src/boxes.rs":"c11c408e4be679233cde528c1194ab566456da99bf1acf9a67fecfe2251da84f","src/lib.rs":"a35dc6240437f41ef73cc890e59d4c4ad63881d4ab13880c5b2b4d21a2297cce","src/macros.rs":"76c840f9299797527fe71aa5b378ffb01312767372b45cc62deddb19775400ae","src/tests.rs":"7598fba59c47dc0f713c5a71fa477d046b595ba6362c6e32d775e33197770055","tests/bbb_sunflower_QCIF_30fps_h263_noaudio_1f.3gp":"03e5b1264d0a188d77b9e676ba3ce23a801b17aaa11c0343dfd851d6ea4e3a40","tests/bug-1661347.avif":"a4741189d897401c746492d780bccf4c42dddde8f46d01a791f9656aac2ab164","tests/corrupt/bad-ipma-flags.avif":"ecde7997b97db1910b9dcc7ca8e3c8957da0e83681ea9008c66dc9f12b78ad19","tests/corrupt/bad-ipma-version.avif":"7f9a1a0b4ebbf8d800d22eaae5ff78970cc6b811317db6c1467c6883952b7c9b","tests/corrupt/bug-1655846.avif":"e0a5a06225800fadf05f5352503a4cec11af73eef705c43b4acab5f4a99dea50","tests/corrupt/bug-1661347.avif":"31c26561e1d9eafb60f7c5968b82a0859d203d73f17f26b29276256acee12966","tests/kodim-extents.avif":"e4de6d124535206c3daca797e64cccc6a5b439f93733af52a95b1e82d581a78b","tests/overflow.rs":"16b591d8def1a155b3b997622f6ea255536870d99c3d8f97c51755b77a50de3c","tests/public.rs":"07d4a2a0b94007b85b8e581c3d3f0e49e1598980a502dd2577e4da600c9275fa"},"package":null}
{"files":{"Cargo.toml":"e05ce4c7205fd0efb6de26f9b16cb2dce3b16527482f66d894a457ae741f7792","benches/avif_benchmark.rs":"cd99c0dde025ab40d2cd860f53dc697a1587a48c164c3e5c8adfd40add29d772","src/boxes.rs":"0066b90963c50031e850e1d6c20b99537703236cb7f215851842e130562d9c46","src/lib.rs":"9323e160a8bbf2b764b3088c65e5d9483bb09b7a6bb79a4424c07f1780f46af2","src/macros.rs":"76c840f9299797527fe71aa5b378ffb01312767372b45cc62deddb19775400ae","src/tests.rs":"babce5800634d2db0400dedba324431aba45cda558aa09feec79e50a288dd4c0","src/unstable.rs":"5a0edd7f803fdc355a46bc8142f29aca976d5bbb24cc5d94a3d7c61d0cf4634a","tests/amr_nb_1f.3gp":"d1423e3414ad06b69f8b58d5c916ec353ba2d0402d99dec9f1c88acc33b6a127","tests/amr_wb_1f.3gp":"be635b24097e8757b0c04d70ab28e00417ca113e86108b6c269b79b64b89bcd5","tests/bbb_sunflower_QCIF_30fps_h263_noaudio_1f.3gp":"03e5b1264d0a188d77b9e676ba3ce23a801b17aaa11c0343dfd851d6ea4e3a40","tests/bug-1661347.avif":"a4741189d897401c746492d780bccf4c42dddde8f46d01a791f9656aac2ab164","tests/corrupt/av1C-missing-essential.avif":"a1501254c4071847b2269fe40b81409c389ff14e91cf7c0005a47e6ea97a6803","tests/corrupt/bad-ipma-flags.avif":"ecde7997b97db1910b9dcc7ca8e3c8957da0e83681ea9008c66dc9f12b78ad19","tests/corrupt/bad-ipma-version.avif":"7f9a1a0b4ebbf8d800d22eaae5ff78970cc6b811317db6c1467c6883952b7c9b","tests/corrupt/bug-1655846.avif":"e0a5a06225800fadf05f5352503a4cec11af73eef705c43b4acab5f4a99dea50","tests/corrupt/bug-1661347.avif":"31c26561e1d9eafb60f7c5968b82a0859d203d73f17f26b29276256acee12966","tests/corrupt/clap-missing-essential.avif":"1cc1cf7369ceb91a8720da1059e257ed426aabbb89dbe599b66f53871d228228","tests/corrupt/hdlr-not-first.avif":"2c29308af077209b9c984921b7e36f8fb7ca7cf379cf8eba4c7a91f65bc7a304","tests/corrupt/hdlr-not-pict.avif":"9fe37619606645a95725300a9e34fada9190d1e0b3919881db84353941ca9291","tests/corrupt/imir-missing-essential.avif":"b1226e4b1358528befbd3f1126b5caf0c5051b4354777b87e71f6001f3829f87","tests/corrupt/ipma-duplicate-version-and-flags.avif":"cf8e15ec4b21
0235f3d68332a1adeb64e35c41b8d8e1e7586ae38b6d9cd8926c","tests/corrupt/irot-missing-essential.avif":"b7da1fc1d1b45bb1b7ca3494476e052f711d794a6d010df6870872ed8b9da10e","tests/corrupt/no-hdlr.avif":"91a1eb70c7b6adf2104e471d7deeeb98084a591d64ce09ba106c27edfbc3a409","tests/corrupt/no-mif1.avif":"1442aa6ffaeb9512724287768bfd1850d3aa29a651ef05abb33e5dec2b3ee5c2","tests/corrupt/no-pixi-for-alpha.avif":"f8adc3573c79ee25bf6d4dd2693c61661469b28f86a5c7b1d9e41b0e8d2d53bb","tests/corrupt/no-pixi.avif":"4b1776def440dc8b913c170e4479772ee6bbb299b8679f7c564704bd03c9597e","tests/multiple-extents.avif":"b5549ac68793e155a726d754e565cea0da03fa17833d3545f45c79e13f4c9360","tests/overflow.rs":"16b591d8def1a155b3b997622f6ea255536870d99c3d8f97c51755b77a50de3c","tests/public.rs":"eae6b3f3d09a79944ec9dbbc5cb5dbc62e83ddc7e11d1f9e551d02f6ea9e699a"},"package":null}

View File

@ -40,6 +40,11 @@ test-assembler = "0.1.2"
walkdir = "2.3.1"
criterion = "0.3"
[features]
3gpp = []
meta-xml = []
unstable-api = []
[[bench]]
name = "avif_benchmark"
harness = false

View File

@ -8,7 +8,7 @@ use criterion::{criterion_group, criterion_main, Criterion};
use std::fs::File;
fn criterion_benchmark(c: &mut Criterion) {
c.bench_function("avif_largest", |b| b.iter(|| avif_largest()));
c.bench_function("avif_largest", |b| b.iter(avif_largest));
}
criterion_group!(benches, criterion_benchmark);
@ -19,5 +19,5 @@ fn avif_largest() {
"av1-avif/testFiles/Netflix/avif/cosmos_frame05000_yuv444_12bpc_bt2020_pq_qlossless.avif",
)
.expect("Unknown file");
assert!(mp4::read_avif(input).is_ok());
assert!(mp4::read_avif(input, mp4::ParseStrictness::Normal).is_ok());
}

View File

@ -31,10 +31,10 @@ macro_rules! box_database {
}
}
impl Into<u32> for BoxType {
fn into(self) -> u32 {
impl From<BoxType> for u32 {
fn from(b: BoxType) -> u32 {
use self::BoxType::*;
match self {
match b {
$($(#[$attr])* $boxenum => $boxtype),*,
UnknownBox(t) => t,
}
@ -46,7 +46,7 @@ macro_rules! box_database {
impl fmt::Debug for BoxType {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
let fourcc: FourCC = From::from(self.clone());
let fourcc: FourCC = From::from(*self);
fourcc.fmt(f)
}
}
@ -116,13 +116,16 @@ box_database!(
MediaHeaderBox 0x6d64_6864, // "mdhd"
HandlerBox 0x6864_6c72, // "hdlr"
MediaInformationBox 0x6d69_6e66, // "minf"
ImageReferenceBox 0x6972_6566, // "iref"
ImagePropertiesBox 0x6970_7270, // "iprp"
ItemReferenceBox 0x6972_6566, // "iref"
ItemPropertiesBox 0x6970_7270, // "iprp"
ItemPropertyContainerBox 0x6970_636f, // "ipco"
ItemPropertyAssociationBox 0x6970_6d61, // "ipma"
ColorInformationBox 0x636f_6c72, // "colr"
PixelInformationBox 0x7069_7869, // "pixi"
AuxiliaryTypeProperty 0x6175_7843, // "auxC"
CleanApertureBox 0x636c_6170, // "clap"
ImageRotation 0x6972_6f74, // "irot"
ImageMirror 0x696d_6972, // "imir"
SampleTableBox 0x7374_626c, // "stbl"
SampleDescriptionBox 0x7374_7364, // "stsd"
TimeToSampleBox 0x7374_7473, // "stts"
@ -138,6 +141,12 @@ box_database!(
H263SpecificBox 0x6432_3633, // "d263"
MP4AudioSampleEntry 0x6d70_3461, // "mp4a"
MP4VideoSampleEntry 0x6d70_3476, // "mp4v"
#[cfg(feature = "3gpp")]
AMRNBSampleEntry 0x7361_6d72, // "samr" - AMR narrow-band
#[cfg(feature = "3gpp")]
AMRWBSampleEntry 0x7361_7762, // "sawb" - AMR wide-band
#[cfg(feature = "3gpp")]
AMRSpecificBox 0x6461_6d72, // "damr"
ESDBox 0x6573_6473, // "esds"
VP8SampleEntry 0x7670_3038, // "vp08"
VP9SampleEntry 0x7670_3039, // "vp09"
@ -170,6 +179,10 @@ box_database!(
MetadataItemListEntry 0x696c_7374, // "ilst"
MetadataItemDataEntry 0x6461_7461, // "data"
MetadataItemNameBox 0x6e61_6d65, // "name"
#[cfg(feature = "meta-xml")]
MetadataXMLBox 0x786d_6c20, // "xml "
#[cfg(feature = "meta-xml")]
MetadataBXMLBox 0x6278_6d6c, // "bxml"
UserdataBox 0x7564_7461, // "udta"
AlbumEntry 0xa961_6c62, // "©alb"
ArtistEntry 0xa941_5254, // "©ART"

View File

@ -4,6 +4,14 @@
// License, v. 2.0. If a copy of the MPL was not distributed with this
// file, You can obtain one at https://mozilla.org/MPL/2.0/.
// `clippy::upper_case_acronyms` is a nightly-only lint as of 2021-03-15, so we
// allow `clippy::unknown_clippy_lints` to ignore it on stable - but
// `clippy::unknown_clippy_lints` has been renamed in nightly, so we need to
// allow `renamed_and_removed_lints` to ignore a warning for that.
#![allow(renamed_and_removed_lints)]
#![allow(clippy::unknown_clippy_lints)]
#![allow(clippy::upper_case_acronyms)]
#[macro_use]
extern crate log;
@ -31,6 +39,9 @@ use boxes::{BoxType, FourCC};
#[cfg(test)]
mod tests;
#[cfg(feature = "unstable-api")]
pub mod unstable;
// Arbitrary buffer size limit used for raw read_bufs on a box.
const BUF_SIZE_LIMIT: u64 = 10 * 1024 * 1024;
@ -38,9 +49,15 @@ const BUF_SIZE_LIMIT: u64 = 10 * 1024 * 1024;
// frame per table entry in 30 fps.
const TABLE_SIZE_LIMIT: u32 = 30 * 60 * 60 * 24 * 7;
/// The 'mif1' brand indicates structural requirements on files
/// See HEIF (ISO 23008-12:2017) § 10.2.1
const MIF1_BRAND: FourCC = FourCC { value: *b"mif1" };
/// A trait to indicate a type can be infallibly converted to `u64`.
/// This should only be implemented for infallible conversions, so only unsigned types are valid.
trait ToU64 {
// Remove when https://github.com/rust-lang/rust-clippy/issues/6727 is resolved
#[allow(clippy::wrong_self_convention)]
fn to_u64(self) -> u64;
}
@ -59,6 +76,8 @@ impl ToU64 for usize {
/// A trait to indicate a type can be infallibly converted to `usize`.
/// This should only be implemented for infallible conversions, so only unsigned types are valid.
pub trait ToUsize {
// Remove when https://github.com/rust-lang/rust-clippy/issues/6727 is resolved
#[allow(clippy::wrong_self_convention)]
fn to_usize(self) -> usize;
}
@ -399,6 +418,8 @@ pub enum AudioCodecSpecific {
ALACSpecificBox(ALACSpecificBox),
MP3,
LPCM,
#[cfg(feature = "3gpp")]
AMRSpecificBox(TryVec<u8>),
}
#[derive(Debug)]
@ -459,6 +480,7 @@ pub struct VPxConfigBox {
pub codec_init: TryVec<u8>,
}
/// See AV1-ISOBMFF § 2.3.3 https://aomediacodec.github.io/av1-isobmff/#av1codecconfigurationbox-syntax
#[derive(Debug)]
pub struct AV1ConfigBox {
pub profile: u8,
@ -737,6 +759,19 @@ pub struct MetadataBox {
pub sort_album_artist: Option<TryString>,
/// The name of the composer to sort by 'soco'
pub sort_composer: Option<TryString>,
/// Metadata
#[cfg(feature = "meta-xml")]
pub xml: Option<XmlBox>,
}
/// See ISOBMFF (ISO 14496-12:2015) § 8.11.2.1
#[cfg(feature = "meta-xml")]
#[derive(Debug)]
pub enum XmlBox {
/// XML metadata
StringXmlBox(TryString),
/// Binary XML metadata
BinaryXmlBox(TryVec<u8>),
}
/// Internal data structures.
@ -748,6 +783,8 @@ pub struct MediaContext {
pub mvex: Option<MovieExtendsBox>,
pub psshs: TryVec<ProtectionSystemSpecificHeaderBox>,
pub userdata: Option<Result<UserdataBox>>,
#[cfg(feature = "meta-xml")]
pub metadata: Option<Result<MetadataBox>>,
}
/// An ISOBMFF item as described by an iloc box. For the sake of avoiding copies,
@ -801,7 +838,7 @@ impl AvifContext {
"AvifItem::Location requires the location exists in AvifContext::item_storage"
);
}
AvifItem::Data(data) => return data.as_slice(),
AvifItem::Data(data) => data.as_slice(),
}
}
}
@ -1018,7 +1055,7 @@ enum IlocFieldSize {
}
impl IlocFieldSize {
fn to_bits(&self) -> u8 {
fn as_bits(&self) -> u8 {
match self {
IlocFieldSize::Zero => 0,
IlocFieldSize::Four => 32,
@ -1107,6 +1144,25 @@ impl Default for TrackType {
}
}
// This type is used by mp4parse_capi since it needs to be passed from FFI consumers
// The C-visible struct is renamed via mp4parse_capi/cbindgen.toml to match naming conventions
/// How strictly specification requirements are enforced during parsing.
/// Violations are reported through `fail_if`, which either errors out or
/// logs a warning depending on the level chosen here.
#[repr(C)]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum ParseStrictness {
Permissive, // Error only on ambiguous inputs
Normal, // Error on "shall" directives, log warnings for "should"
Strict, // Error on "should" directives
}
fn fail_if(violation: bool, message: &'static str) -> Result<()> {
if violation {
Err(Error::InvalidData(message))
} else {
warn!("{}", message);
Ok(())
}
}
#[derive(Debug, Clone, Copy, PartialEq)]
pub enum CodecType {
Unknown,
@ -1124,6 +1180,10 @@ pub enum CodecType {
LPCM, // QT
ALAC,
H263,
#[cfg(feature = "3gpp")]
AMRNB,
#[cfg(feature = "3gpp")]
AMRWB,
}
impl Default for CodecType {
@ -1367,9 +1427,11 @@ fn skip_box_remain<T: Read>(src: &mut BMFFBox<T>) -> Result<()> {
}
/// Read the contents of an AVIF file
pub fn read_avif<T: Read>(f: &mut T) -> Result<AvifContext> {
pub fn read_avif<T: Read>(f: &mut T, strictness: ParseStrictness) -> Result<AvifContext> {
let _ = env_logger::try_init();
debug!("read_avif(strictness: {:?})", strictness);
let mut f = OffsetReader::new(f);
let mut iter = BoxIter::new(&mut f);
@ -1378,8 +1440,16 @@ pub fn read_avif<T: Read>(f: &mut T) -> Result<AvifContext> {
if let Some(mut b) = iter.next_box()? {
if b.head.name == BoxType::FileTypeBox {
let ftyp = read_ftyp(&mut b)?;
if !ftyp.compatible_brands.contains(&FourCC::from(*b"mif1")) {
return Err(Error::InvalidData("compatible_brands must contain 'mif1'"));
if !ftyp.compatible_brands.contains(&MIF1_BRAND) {
// This mandatory inclusion of this brand is in the process of being changed
// to optional. In anticipation of that, only give an error in strict mode
// See https://github.com/MPEGGroup/MIAF/issues/5
// and https://github.com/MPEGGroup/FileFormat/issues/23
fail_if(
strictness == ParseStrictness::Strict,
"The FileTypeBox should contain 'mif1' in the compatible_brands list \
per MIAF (ISO 23000-22:2019) § 7.2.1.2",
)?;
}
} else {
return Err(Error::InvalidData("'ftyp' box must occur first"));
@ -1398,7 +1468,7 @@ pub fn read_avif<T: Read>(f: &mut T) -> Result<AvifContext> {
"There should be zero or one meta boxes per ISOBMFF (ISO 14496-12:2015) § 8.11.1.1",
));
}
meta = Some(read_avif_meta(&mut b)?);
meta = Some(read_avif_meta(&mut b, strictness)?);
}
BoxType::MediaDataBox => {
if b.bytes_left() > 0 {
@ -1508,8 +1578,31 @@ pub fn read_avif<T: Read>(f: &mut T) -> Result<AvifContext> {
assert!(item.is_some());
}
let primary_item =
primary_item.ok_or(Error::InvalidData("Required primary item data not found"))?;
let primary_item = primary_item.ok_or(Error::InvalidData(
"Missing 'pitm' box, required per HEIF (ISO/IEC 23008-12:2017) § 10.2.1",
))?;
let mut has_primary_pixi = false;
let mut has_alpha_pixi = false;
for assoc_prop in meta.properties {
if let ItemProperty::Channels(_) = assoc_prop.property {
if assoc_prop.item_id == meta.primary_item_id {
has_primary_pixi = true;
} else if Some(assoc_prop.item_id) == alpha_item_id {
has_alpha_pixi = true;
}
}
}
if !has_primary_pixi || (alpha_item.is_some() && !has_alpha_pixi) {
fail_if(
strictness != ParseStrictness::Permissive,
"The pixel information property shall be associated with every image \
that is displayable (not hidden) \
per MIAF (ISO/IEC 23000-22:2019) specification § 7.3.6.6",
)?;
}
// We could potentially optimize memory usage by trying to avoid reading
// or storing mdat boxes which aren't used by our API, but for now it seems
@ -1526,13 +1619,17 @@ pub fn read_avif<T: Read>(f: &mut T) -> Result<AvifContext> {
/// Currently requires the primary item to be an av01 item type and generates
/// an error otherwise.
/// See ISOBMFF (ISO 14496-12:2015) § 8.11.1
fn read_avif_meta<T: Read + Offset>(src: &mut BMFFBox<T>) -> Result<AvifMeta> {
fn read_avif_meta<T: Read + Offset>(
src: &mut BMFFBox<T>,
strictness: ParseStrictness,
) -> Result<AvifMeta> {
let version = read_fullbox_version_no_flags(src)?;
if version != 0 {
return Err(Error::Unsupported("unsupported meta version"));
}
let mut read_handler_box = false;
let mut primary_item_id = None;
let mut item_infos = None;
let mut iloc_items = None;
@ -1542,19 +1639,44 @@ fn read_avif_meta<T: Read + Offset>(src: &mut BMFFBox<T>) -> Result<AvifMeta> {
let mut iter = src.box_iter();
while let Some(mut b) = iter.next_box()? {
trace!("read_avif_meta parsing {:?} box", b.head.name);
if !read_handler_box && b.head.name != BoxType::HandlerBox {
fail_if(
strictness != ParseStrictness::Permissive,
"The HandlerBox shall be the first contained box within the MetaBox \
per MIAF (ISO 23000-22:2019) § 7.2.1.5",
)?;
}
match b.head.name {
BoxType::HandlerBox => {
if read_handler_box {
return Err(Error::InvalidData(
"There shall be exactly one hdlr box per ISOBMFF (ISO 14496-12:2015) § 8.4.3.1",
));
}
let HandlerBox { handler_type } = read_hdlr(&mut b)?;
if handler_type != b"pict" {
fail_if(
strictness != ParseStrictness::Permissive,
"The HandlerBox handler_type must be 'pict' \
per MIAF (ISO 23000-22:2019) § 7.2.1.5",
)?;
}
read_handler_box = true;
}
BoxType::ItemInfoBox => {
if item_infos.is_some() {
return Err(Error::InvalidData(
"There should be zero or one iinf boxes per ISOBMFF (ISO 14496-12:2015) § 8.11.6.1",
"There shall be zero or one iinf boxes per ISOBMFF (ISO 14496-12:2015) § 8.11.6.1",
));
}
item_infos = Some(read_iinf(&mut b)?);
item_infos = Some(read_iinf(&mut b, strictness)?);
}
BoxType::ItemLocationBox => {
if iloc_items.is_some() {
return Err(Error::InvalidData(
"There should be zero or one iloc boxes per ISOBMFF (ISO 14496-12:2015) § 8.11.3.1",
"There shall be zero or one iloc boxes per ISOBMFF (ISO 14496-12:2015) § 8.11.3.1",
));
}
iloc_items = Some(read_iloc(&mut b)?);
@ -1562,22 +1684,22 @@ fn read_avif_meta<T: Read + Offset>(src: &mut BMFFBox<T>) -> Result<AvifMeta> {
BoxType::PrimaryItemBox => {
if primary_item_id.is_some() {
return Err(Error::InvalidData(
"There should be zero or one pitm boxes per ISOBMFF (ISO 14496-12:2015) § 8.11.4.1",
"There shall be zero or one pitm boxes per ISOBMFF (ISO 14496-12:2015) § 8.11.4.1",
));
}
primary_item_id = Some(read_pitm(&mut b)?);
}
BoxType::ImageReferenceBox => {
BoxType::ItemReferenceBox => {
if item_references.is_some() {
return Err(Error::InvalidData("There should be zero or one iref boxes"));
return Err(Error::InvalidData("There shall be zero or one iref boxes per ISOBMFF (ISO 14496-12:2015) § 8.11.12.1"));
}
item_references = Some(read_iref(&mut b)?);
}
BoxType::ImagePropertiesBox => {
BoxType::ItemPropertiesBox => {
if properties.is_some() {
return Err(Error::InvalidData("There should be zero or one iprp boxes"));
return Err(Error::InvalidData("There shall be zero or one iprp boxes per HEIF (ISO 23008-12:2017) § 9.3.1"));
}
properties = Some(read_iprp(&mut b)?);
properties = Some(read_iprp(&mut b, MIF1_BRAND, strictness)?);
}
_ => skip_box_content(&mut b)?,
}
@ -1626,7 +1748,10 @@ fn read_pitm<T: Read>(src: &mut BMFFBox<T>) -> Result<u32> {
/// Parse an Item Information Box
/// See ISOBMFF (ISO 14496-12:2015) § 8.11.6
fn read_iinf<T: Read>(src: &mut BMFFBox<T>) -> Result<TryVec<ItemInfoEntry>> {
fn read_iinf<T: Read>(
src: &mut BMFFBox<T>,
strictness: ParseStrictness,
) -> Result<TryVec<ItemInfoEntry>> {
let version = read_fullbox_version_no_flags(src)?;
match version {
@ -1645,11 +1770,11 @@ fn read_iinf<T: Read>(src: &mut BMFFBox<T>) -> Result<TryVec<ItemInfoEntry>> {
while let Some(mut b) = iter.next_box()? {
if b.head.name != BoxType::ItemInfoEntry {
return Err(Error::InvalidData(
"iinf box should contain only infe boxes",
"iinf box shall contain only infe boxes per ISOBMFF (ISO 14496-12:2015) § 8.11.6.2",
));
}
item_infos.push(read_infe(&mut b)?)?;
item_infos.push(read_infe(&mut b, strictness)?)?;
check_parser_state!(b.content);
}
@ -1672,10 +1797,19 @@ impl std::fmt::Display for U32BE {
/// Parse an Item Info Entry
/// See ISOBMFF (ISO 14496-12:2015) § 8.11.6.2
fn read_infe<T: Read>(src: &mut BMFFBox<T>) -> Result<ItemInfoEntry> {
// According to the standard, it seems the flags field should be 0, but
// at least one sample AVIF image has a nonzero value.
let (version, _) = read_fullbox_extra(src)?;
fn read_infe<T: Read>(src: &mut BMFFBox<T>, strictness: ParseStrictness) -> Result<ItemInfoEntry> {
let (version, flags) = read_fullbox_extra(src)?;
// According to the standard, it seems the flags field shall be 0, but at
// least one sample AVIF image has a nonzero value.
// See https://github.com/AOMediaCodec/av1-avif/issues/146
if flags != 0 {
fail_if(
strictness == ParseStrictness::Strict,
"'infe' flags field shall be 0 \
per ISOBMFF (ISO 14496-12:2015) § 8.11.6.2",
)?;
}
// mif1 brand (see HEIF (ISO 23008-12:2017) § 10.2.1) only requires v2 and 3
let item_id = match version {
@ -1744,7 +1878,11 @@ fn read_iref<T: Read>(src: &mut BMFFBox<T>) -> Result<TryVec<SingleItemTypeRefer
/// Parse an Item Properties Box
/// See HEIF (ISO 23008-12:2017) § 9.3.1
fn read_iprp<T: Read>(src: &mut BMFFBox<T>) -> Result<TryVec<AssociatedProperty>> {
fn read_iprp<T: Read>(
src: &mut BMFFBox<T>,
brand: FourCC,
strictness: ParseStrictness,
) -> Result<TryVec<AssociatedProperty>> {
let mut iter = src.box_iter();
let mut properties = match iter.next_box()? {
@ -1753,9 +1891,6 @@ fn read_iprp<T: Read>(src: &mut BMFFBox<T>) -> Result<TryVec<AssociatedProperty>
None => Err(Error::UnexpectedEOF),
}?;
// Per HEIF (ISO 23008-12:2017) § 9.3.1: There can be zero or more ipma boxes
// but "There shall be at most one ItemPropertyAssociationbox with a given
// pair of values of version and flags"
let mut ipma_version_and_flag_values_seen = TryVec::with_capacity(1)?;
let mut associated = TryVec::new();
@ -1766,22 +1901,57 @@ fn read_iprp<T: Read>(src: &mut BMFFBox<T>) -> Result<TryVec<AssociatedProperty>
let (version, flags) = read_fullbox_extra(&mut b)?;
if ipma_version_and_flag_values_seen.contains(&(version, flags)) {
return Err(Error::InvalidData("Duplicate ipma with same version/flags"));
fail_if(
strictness != ParseStrictness::Permissive,
"There shall be at most one ItemPropertyAssociationbox with a given pair of \
values of version and flags \
per HEIF (ISO 23008-12:2017) § 9.3.1",
)?;
}
if flags != 0 && properties.len() <= 127 {
return Err(Error::InvalidData("flags should be equal to 0 unless there are more than 127 properties in the ItemPropertyContainerBox"));
fail_if(
strictness == ParseStrictness::Strict,
"Unless there are more than 127 properties in the ItemPropertyContainerBox, \
flags should be equal to 0 \
per HEIF (ISO 23008-12:2017) § 9.3.1",
)?;
}
ipma_version_and_flag_values_seen.push((version, flags))?;
let associations = read_ipma(&mut b, version, flags)?;
let associations = read_ipma(&mut b, strictness, version, flags)?;
for a in associations {
if a.property_index == 0 {
if a.essential {
return Err(Error::InvalidData("0 property index can't be essential"));
fail_if(
strictness != ParseStrictness::Permissive,
"the essential indicator shall be 0 for property index 0 \
per HEIF (ISO 23008-12:2017) § 9.3.3",
)?;
}
continue;
}
if let Some(property) = properties.remove(&a.property_index) {
if brand == MIF1_BRAND {
match property {
ItemProperty::AV1Config(_)
| ItemProperty::CleanAperture(_)
| ItemProperty::Mirroring(_)
| ItemProperty::Rotation(_) => {
if !a.essential {
fail_if(
strictness == ParseStrictness::Strict,
"All transformative properties associated with coded and \
derived images required or conditionally required by this \
document shall be marked as essential \
per MIAF (ISO 23000-22:2019) § 7.3.9",
)?;
}
}
_ => {}
}
} else {
unreachable!("read_iprp expects mif1 brand")
}
associated.push(AssociatedProperty {
item_id: a.item_id,
property,
@ -1796,10 +1966,14 @@ fn read_iprp<T: Read>(src: &mut BMFFBox<T>) -> Result<TryVec<AssociatedProperty>
}
/// See HEIF (ISO 23008-12:2017) § 9.3.1
#[derive(Debug, PartialEq)]
#[derive(Debug)]
pub enum ItemProperty {
Channels(TryVec<u8>),
AuxiliaryType(AuxiliaryTypeProperty),
AV1Config(AV1ConfigBox),
CleanAperture(CleanApertureBox),
Channels(TryVec<u8>),
Mirroring(ImageMirror),
Rotation(ImageRotation),
}
/// For storing ItemPropertyAssociation data
@ -1855,7 +2029,7 @@ macro_rules! impl_bounded_product {
impl $name {
pub fn new(value: $inner) -> Self {
assert!(<$inner>::from(value) <= Self::MAX);
assert!(value <= Self::MAX);
Self(value)
}
@ -1917,8 +2091,8 @@ impl std::ops::Add<U32MulU16> for U32MulU8 {
fn add(self, rhs: U32MulU16) -> Self::Output {
static_assertions::const_assert!(U32MulU8::MAX + U32MulU16::MAX < U64::MAX);
let lhs: u64 = self.get().into();
let rhs: u64 = rhs.get().into();
let lhs: u64 = self.get();
let rhs: u64 = rhs.get();
Self::Output::new(lhs.checked_add(rhs).expect("infallible"))
}
}
@ -1974,6 +2148,7 @@ fn calculate_ipma_total_associations(
/// See HEIF (ISO 23008-12:2017) § 9.3.1
fn read_ipma<T: Read>(
src: &mut BMFFBox<T>,
strictness: ParseStrictness,
version: u8,
flags: u32,
) -> Result<TryVec<Association>> {
@ -1997,6 +2172,7 @@ fn read_ipma<T: Read>(
};
if let Some(previous_association) = associations.last() {
#[allow(clippy::comparison_chain)]
if previous_association.item_id > item_id {
return Err(Error::InvalidData(
"Each ItemPropertyAssociation box shall be ordered by increasing item_ID",
@ -2031,9 +2207,11 @@ fn read_ipma<T: Read>(
}) = associations.last()
{
if *max_item_id <= u16::MAX.into() {
return Err(Error::InvalidData(
"The version 0 should be used unless 32-bit item_ID values are needed",
));
fail_if(
strictness == ParseStrictness::Strict,
"The ipma version 0 should be used unless 32-bit item_ID values are needed \
per HEIF (ISO 23008-12:2017) § 9.3.1",
)?;
}
}
}
@ -2050,8 +2228,12 @@ fn read_ipco<T: Read>(src: &mut BMFFBox<T>) -> Result<TryHashMap<u16, ItemProper
let mut iter = src.box_iter();
while let Some(mut b) = iter.next_box()? {
if let Some(property) = match b.head.name {
BoxType::PixelInformationBox => Some(ItemProperty::Channels(read_pixi(&mut b)?)),
BoxType::AuxiliaryTypeProperty => Some(ItemProperty::AuxiliaryType(read_auxc(&mut b)?)),
BoxType::AV1CodecConfigurationBox => Some(ItemProperty::AV1Config(read_av1c(&mut b)?)),
BoxType::CleanApertureBox => Some(ItemProperty::CleanAperture(read_clap(&mut b)?)),
BoxType::ImageMirror => Some(ItemProperty::Mirroring(read_imir(&mut b)?)),
BoxType::ImageRotation => Some(ItemProperty::Rotation(read_irot(&mut b)?)),
BoxType::PixelInformationBox => Some(ItemProperty::Channels(read_pixi(&mut b)?)),
_ => {
skip_box_remain(&mut b)?;
None
@ -2082,7 +2264,7 @@ fn read_pixi<T: Read>(src: &mut BMFFBox<T>) -> Result<TryVec<u8>> {
let mut channels = TryVec::with_capacity(num_channels)?;
let num_channels_read = src.try_read_to_end(&mut channels)?;
if num_channels_read != num_channels.into() {
if num_channels_read != num_channels {
return Err(Error::InvalidData("invalid num_channels"));
}
@ -2090,6 +2272,98 @@ fn read_pixi<T: Read>(src: &mut BMFFBox<T>) -> Result<TryVec<u8>> {
Ok(channels)
}
/// A rational value with unsigned 32-bit numerator and denominator, as used
/// by the fields of the CleanApertureBox ('clap'); read via `read_u32_fraction`.
#[derive(Debug)]
pub struct U32Fraction {
numerator: u32,
denominator: u32,
}
/// Read a `U32Fraction` from `src`: two big-endian `u32` values,
/// numerator first, then denominator.
fn read_u32_fraction<T: Read>(src: &mut BMFFBox<T>) -> Result<U32Fraction> {
    // Struct-literal field initializers evaluate top to bottom, so the
    // numerator is read from the stream before the denominator.
    Ok(U32Fraction {
        numerator: be_u32(src)?,
        denominator: be_u32(src)?,
    })
}
/// Clean aperture ('clap') property: four fractional values describing the
/// displayed region of an image. See HEIF (ISO 23008-12:2017) § 6.5.9.
#[derive(Debug)]
pub struct CleanApertureBox {
width: U32Fraction,
height: U32Fraction,
horizontal_offset: U32Fraction,
vertical_offset: U32Fraction,
}
/// Parse a clean aperture ('clap') box into a `CleanApertureBox`.
/// See HEIF (ISO 23008-12:2017) § 6.5.9
fn read_clap<T: Read>(src: &mut BMFFBox<T>) -> Result<CleanApertureBox> {
    // The four fractions are stored sequentially; struct-literal field
    // initializers evaluate in written order, preserving the read order.
    let clap = CleanApertureBox {
        width: read_u32_fraction(src)?,
        height: read_u32_fraction(src)?,
        horizontal_offset: read_u32_fraction(src)?,
        vertical_offset: read_u32_fraction(src)?,
    };
    // Confirm the box contained exactly the bytes we consumed.
    check_parser_state!(src.content);
    Ok(clap)
}
/// Image rotation ('irot') property in anticlockwise 90° increments,
/// as parsed by `read_irot`. See HEIF (ISO 23008-12:2017) § 6.5.10.
#[derive(Debug)]
pub enum ImageRotation {
Zero,
AntiClockwise90Degrees,
AntiClockwise180Degrees,
AntiClockwise270Degrees,
}
/// Parse an image rotation ('irot') box.
/// See HEIF (ISO 23008-12:2017) § 6.5.10
fn read_irot<T: Read>(src: &mut BMFFBox<T>) -> Result<ImageRotation> {
    let data = src.read_into_try_vec()?;
    let mut reader = BitReader::new(&data);
    // Layout: 6 reserved bits, then a 2-bit rotation count.
    let _reserved = reader.read_u8(6)?;
    let angle = reader.read_u8(2)?;
    check_parser_state!(src.content);
    Ok(match angle {
        0 => ImageRotation::Zero,
        1 => ImageRotation::AntiClockwise90Degrees,
        2 => ImageRotation::AntiClockwise180Degrees,
        3 => ImageRotation::AntiClockwise270Degrees,
        // A 2-bit read can only yield 0..=3.
        _ => unreachable!(),
    })
}
/// Image mirroring ('imir') property axis, as parsed by `read_imir`.
/// See HEIF (ISO 23008-12:2017) § 6.5.12 for the axis semantics.
#[derive(Debug)]
pub enum ImageMirror {
Vertical,
Horizontal,
}
/// Parse an image mirroring ('imir') box.
/// See HEIF (ISO 23008-12:2017) § 6.5.12
fn read_imir<T: Read>(src: &mut BMFFBox<T>) -> Result<ImageMirror> {
    let data = src.read_into_try_vec()?;
    let mut reader = BitReader::new(&data);
    // Layout: 7 reserved bits, then a 1-bit mirror axis.
    let _reserved = reader.read_u8(7)?;
    let axis = reader.read_u8(1)?;
    check_parser_state!(src.content);
    Ok(match axis {
        0 => ImageMirror::Vertical,
        1 => ImageMirror::Horizontal,
        // A 1-bit read can only yield 0 or 1.
        _ => unreachable!(),
    })
}
/// See HEIF (ISO 23008-12:2017) § 6.5.8
#[derive(Debug, PartialEq)]
pub struct AuxiliaryTypeProperty {
@ -2183,7 +2457,7 @@ fn read_iloc<T: Read>(src: &mut BMFFBox<T>) -> Result<TryHashMap<u32, ItemLocati
));
}
let base_offset = iloc.read_u64(base_offset_size.to_bits())?;
let base_offset = iloc.read_u64(base_offset_size.as_bits())?;
let extent_count = iloc.read_u16(16)?;
if extent_count < 1 {
@ -2210,7 +2484,7 @@ fn read_iloc<T: Read>(src: &mut BMFFBox<T>) -> Result<TryHashMap<u32, ItemLocati
None | Some(IlocFieldSize::Zero) => None,
Some(index_size) => {
debug_assert!(version == IlocVersion::One || version == IlocVersion::Two);
Some(iloc.read_u64(index_size.to_bits())?)
Some(iloc.read_u64(index_size.as_bits())?)
}
};
@ -2218,8 +2492,8 @@ fn read_iloc<T: Read>(src: &mut BMFFBox<T>) -> Result<TryHashMap<u32, ItemLocati
// "If the offset is not identified (the field has a length of zero), then the
// beginning of the source (offset 0) is implied"
// This behavior will follow from BitReader::read_u64(0) -> 0.
let extent_offset = iloc.read_u64(offset_size.to_bits())?;
let extent_length = iloc.read_u64(length_size.to_bits())?.try_into()?;
let extent_offset = iloc.read_u64(offset_size.as_bits())?;
let extent_length = iloc.read_u64(length_size.as_bits())?.try_into()?;
// "If the length is not specified, or specified as zero, then the entire length of
// the source is implied" (ibid)
@ -2287,6 +2561,12 @@ pub fn read_mp4<T: Read>(f: &mut T) -> Result<MediaContext> {
BoxType::MovieBox => {
context = Some(read_moov(&mut b, context)?);
}
#[cfg(feature = "meta-xml")]
BoxType::MetadataBox => {
if let Some(ctx) = &mut context {
ctx.metadata = Some(read_meta(&mut b));
}
}
_ => skip_box_content(&mut b)?,
};
check_parser_state!(b.content);
@ -2332,6 +2612,8 @@ fn read_moov<T: Read>(f: &mut BMFFBox<T>, context: Option<MediaContext>) -> Resu
mut mvex,
mut psshs,
mut userdata,
#[cfg(feature = "meta-xml")]
metadata,
} = context.unwrap_or_default();
let mut iter = f.box_iter();
@ -2374,6 +2656,8 @@ fn read_moov<T: Read>(f: &mut BMFFBox<T>, context: Option<MediaContext>) -> Resu
mvex,
psshs,
userdata,
#[cfg(feature = "meta-xml")]
metadata,
})
}
@ -3027,6 +3311,7 @@ fn read_vpcc<T: Read>(src: &mut BMFFBox<T>) -> Result<VPxConfigBox> {
})
}
/// See AV1-ISOBMFF § 2.3.3 https://aomediacodec.github.io/av1-isobmff/#av1codecconfigurationbox-syntax
fn read_av1c<T: Read>(src: &mut BMFFBox<T>) -> Result<AV1ConfigBox> {
// We want to store the raw config as well as a structured (parsed) config, so create a copy of
// the raw config so we have it later, and then parse the structured data from that.
@ -3541,7 +3826,8 @@ fn read_alac<T: Read>(src: &mut BMFFBox<T>) -> Result<ALACSpecificBox> {
Ok(ALACSpecificBox { version, data })
}
/// Parse a hdlr box.
/// Parse a Handler Reference Box.
/// See ISOBMFF (ISO 14496-12:2015) § 8.4.3
fn read_hdlr<T: Read>(src: &mut BMFFBox<T>) -> Result<HandlerBox> {
let (_, _) = read_fullbox_extra(src)?;
@ -3705,7 +3991,7 @@ fn read_qt_wave_atom<T: Read>(src: &mut BMFFBox<T>) -> Result<ES_Descriptor> {
}
}
codec_specific.ok_or_else(|| Error::InvalidData("malformed audio sample entry"))
codec_specific.ok_or(Error::InvalidData("malformed audio sample entry"))
}
/// Parse an audio description inside an stsd box.
@ -3759,6 +4045,18 @@ fn read_audio_sample_entry<T: Read>(src: &mut BMFFBox<T>) -> Result<SampleEntry>
let (mut codec_type, mut codec_specific) = match name {
BoxType::MP3AudioSampleEntry => (CodecType::MP3, Some(AudioCodecSpecific::MP3)),
BoxType::LPCMAudioSampleEntry => (CodecType::LPCM, Some(AudioCodecSpecific::LPCM)),
// Some mp4 file with AMR doesn't have AMRSpecificBox "damr" in followed while loop,
// we use empty box by default.
#[cfg(feature = "3gpp")]
BoxType::AMRNBSampleEntry => (
CodecType::AMRNB,
Some(AudioCodecSpecific::AMRSpecificBox(Default::default())),
),
#[cfg(feature = "3gpp")]
BoxType::AMRWBSampleEntry => (
CodecType::AMRWB,
Some(AudioCodecSpecific::AMRSpecificBox(Default::default())),
),
_ => (CodecType::Unknown, None),
};
let mut protection_info = TryVec::new();
@ -3818,6 +4116,20 @@ fn read_audio_sample_entry<T: Read>(src: &mut BMFFBox<T>) -> Result<SampleEntry>
codec_type = CodecType::EncryptedAudio;
protection_info.push(sinf)?;
}
#[cfg(feature = "3gpp")]
BoxType::AMRSpecificBox => {
if codec_type != CodecType::AMRNB && codec_type != CodecType::AMRWB {
return Err(Error::InvalidData("malformed audio sample entry"));
}
let amr_dec_spec_struc_size = b
.head
.size
.checked_sub(b.head.offset)
.expect("offset invalid");
let amr_dec_spec_struc = read_buf(&mut b.content, amr_dec_spec_struc_size)?;
debug!("{:?} (AMRDecSpecStruc)", amr_dec_spec_struc);
codec_specific = Some(AudioCodecSpecific::AMRSpecificBox(amr_dec_spec_struc));
}
_ => {
debug!("Unsupported audio codec, box {:?} found", b.head.name);
skip_box_content(&mut b)?;
@ -4002,7 +4314,8 @@ fn read_udta<T: Read>(src: &mut BMFFBox<T>) -> Result<UserdataBox> {
Ok(udta)
}
/// Parse a metadata box inside a udta box
/// Parse the meta box
/// See ISOBMFF (ISO 14496-12:2015) § 8.111.
fn read_meta<T: Read>(src: &mut BMFFBox<T>) -> Result<MetadataBox> {
let (_, _) = read_fullbox_extra(src)?;
let mut iter = src.box_iter();
@ -4010,6 +4323,10 @@ fn read_meta<T: Read>(src: &mut BMFFBox<T>) -> Result<MetadataBox> {
while let Some(mut b) = iter.next_box()? {
match b.head.name {
BoxType::MetadataItemListEntry => read_ilst(&mut b, &mut meta)?,
#[cfg(feature = "meta-xml")]
BoxType::MetadataXMLBox => read_xml_(&mut b, &mut meta)?,
#[cfg(feature = "meta-xml")]
BoxType::MetadataBXMLBox => read_bxml(&mut b, &mut meta)?,
_ => skip_box_content(&mut b)?,
};
check_parser_state!(b.content);
@ -4017,6 +4334,28 @@ fn read_meta<T: Read>(src: &mut BMFFBox<T>) -> Result<MetadataBox> {
Ok(meta)
}
/// Parse a XML box inside a meta box
/// See ISOBMFF (ISO 14496-12:2015) § 8.11.2
#[cfg(feature = "meta-xml")]
fn read_xml_<T: Read>(src: &mut BMFFBox<T>, meta: &mut MetadataBox) -> Result<()> {
if read_fullbox_version_no_flags(src)? != 0 {
return Err(Error::Unsupported("unsupported XmlBox version"));
}
meta.xml = Some(XmlBox::StringXmlBox(src.read_into_try_vec()?));
Ok(())
}
/// Parse a Binary XML box inside a meta box
/// See ISOBMFF (ISO 14496-12:2015) § 8.11.2
#[cfg(feature = "meta-xml")]
fn read_bxml<T: Read>(src: &mut BMFFBox<T>, meta: &mut MetadataBox) -> Result<()> {
if read_fullbox_version_no_flags(src)? != 0 {
return Err(Error::Unsupported("unsupported XmlBox version"));
}
meta.xml = Some(XmlBox::BinaryXmlBox(src.read_into_try_vec()?));
Ok(())
}
/// Parse a metadata box inside a udta box
fn read_ilst<T: Read>(src: &mut BMFFBox<T>, meta: &mut MetadataBox) -> Result<()> {
let mut iter = src.box_iter();

View File

@ -426,7 +426,7 @@ fn read_vpcc_version_0() {
// TODO: it'd be better to find a real sample here.
#[test]
#[allow(clippy::inconsistent_digit_grouping)] // Allow odd grouping for test readability.
#[allow(clippy::unusual_byte_groupings)] // Allow odd grouping for test readability.
fn read_vpcc_version_1() {
let data_length = 12u16;
let mut stream = make_fullbox(BoxSize::Auto, b"vpcC", 1, |s| {

View File

@ -0,0 +1,547 @@
// This Source Code Form is subject to the terms of the Mozilla Public
// License, v. 2.0. If a copy of the MPL was not distributed with this
// file, You can obtain one at https://mozilla.org/MPL/2.0/.
use num_traits::{CheckedAdd, CheckedSub, PrimInt, Zero};
use std::ops::{Add, Neg, Sub};
use super::*;
/// A zero-overhead wrapper around integer types for the sake of always
/// requiring checked arithmetic
#[repr(transparent)] // guarantees the same layout as the wrapped integer
#[derive(Debug, Default, Copy, Clone, PartialEq, Eq, PartialOrd, Ord)]
pub struct CheckedInteger<T>(pub T);
// Wrapping any integer into its checked form is lossless.
impl<T> From<T> for CheckedInteger<T> {
    fn from(i: T) -> Self {
        Self(i)
    }
}
// Orphan rules prevent a more general implementation, but this suffices
// to unwrap the checked value back into a plain i64.
impl From<CheckedInteger<i64>> for i64 {
    fn from(checked: CheckedInteger<i64>) -> i64 {
        checked.0
    }
}
// Checked addition: yields None on overflow instead of wrapping or panicking.
impl<T, U: Into<T>> Add<U> for CheckedInteger<T>
where
    T: CheckedAdd,
{
    type Output = Option<Self>;
    fn add(self, other: U) -> Self::Output {
        self.0.checked_add(&other.into()).map(Into::into)
    }
}
// Checked subtraction: yields None on overflow instead of wrapping or panicking.
impl<T, U: Into<T>> Sub<U> for CheckedInteger<T>
where
    T: CheckedSub,
{
    type Output = Option<Self>;
    fn sub(self, other: U) -> Self::Output {
        self.0.checked_sub(&other.into()).map(Into::into)
    }
}
/// Implement subtraction of checked `u64`s returning i64
// This is necessary for handling Mp4parseTrackInfo::media_time gracefully
impl Sub for CheckedInteger<u64> {
type Output = Option<CheckedInteger<i64>>;
fn sub(self, other: Self) -> Self::Output {
if self >= other {
self.0
.checked_sub(other.0)
.and_then(|u| i64::try_from(u).ok())
.map(CheckedInteger)
} else {
other
.0
.checked_sub(self.0)
.and_then(|u| i64::try_from(u).ok())
.map(i64::neg)
.map(CheckedInteger)
}
}
}
// Exercises `Sub for CheckedInteger<u64>` across sign and overflow cases.
#[test]
fn u64_subtraction_returning_i64() {
    // self > other
    assert_eq!(
        CheckedInteger(2u64) - CheckedInteger(1u64),
        Some(CheckedInteger(1i64))
    );
    // self == other
    assert_eq!(
        CheckedInteger(1u64) - CheckedInteger(1u64),
        Some(CheckedInteger(0i64))
    );
    // difference too large to store in i64
    assert_eq!(CheckedInteger(u64::MAX) - CheckedInteger(1u64), None);
    // self < other
    assert_eq!(
        CheckedInteger(1u64) - CheckedInteger(2u64),
        Some(CheckedInteger(-1i64))
    );
    // difference not representable due to overflow
    assert_eq!(CheckedInteger(1u64) - CheckedInteger(u64::MAX), None);
}
// Allow comparing a CheckedInteger directly against the raw integer type.
impl<T: std::cmp::PartialEq> PartialEq<T> for CheckedInteger<T> {
    fn eq(&self, other: &T) -> bool {
        self.0 == *other
    }
}
/// Provides the following information about a sample in the source file:
/// sample data offset (start and end), composition time in microseconds
/// (start and end) and whether it is a sync sample
// NOTE(review): #[repr(C)] suggests this layout is shared across the C API —
// confirm before reordering fields.
#[repr(C)]
#[derive(Default, Debug, PartialEq)]
pub struct Indice {
    /// The byte offset in the file where the indexed sample begins.
    pub start_offset: CheckedInteger<u64>,
    /// The byte offset in the file where the indexed sample ends. This is
    /// equivalent to `start_offset` + the length in bytes of the indexed
    /// sample. Typically this will be the `start_offset` of the next sample
    /// in the file.
    pub end_offset: CheckedInteger<u64>,
    /// The time in microseconds when the indexed sample should be displayed.
    /// Analogous to the concept of presentation time stamp (pts).
    pub start_composition: CheckedInteger<i64>,
    /// The time in microseconds when the indexed sample should stop being
    /// displayed. Typically this would be the `start_composition` time of the
    /// next sample if samples were ordered by composition time.
    pub end_composition: CheckedInteger<i64>,
    /// The time in microseconds that the indexed sample should be decoded at.
    /// Analogous to the concept of decode time stamp (dts).
    pub start_decode: CheckedInteger<i64>,
    /// Set if the indexed sample is a sync sample. The meaning of sync is
    /// somewhat codec specific, but essentially amounts to if the sample is a
    /// key frame.
    pub sync: bool,
}
/// Create a vector of `Indice`s with the information about track samples.
/// It uses `stsc`, `stco`, `stsz` and `stts` boxes to construct a list of
/// every sample in the file and provides offsets which can be used to read
/// raw sample data from the file.
///
/// Returns `None` if any required table is missing or inconsistent, or if
/// any offset/time computation overflows.
#[allow(clippy::reversed_empty_ranges)]
pub fn create_sample_table(
    track: &Track,
    track_offset_time: CheckedInteger<i64>,
) -> Option<TryVec<Indice>> {
    let timescale = match track.timescale {
        Some(ref t) => TrackTimeScale::<i64>(t.0 as i64, t.1),
        _ => return None,
    };
    let (stsc, stco, stsz, stts) = match (&track.stsc, &track.stco, &track.stsz, &track.stts) {
        (&Some(ref a), &Some(ref b), &Some(ref c), &Some(ref d)) => (a, b, c, d),
        _ => return None,
    };
    // According to spec, no sync table means every sample is sync sample.
    let has_sync_table = matches!(track.stss, Some(_));
    let mut sample_size_iter = stsz.sample_sizes.iter();
    // Get 'stsc' iterator for (chunk_id, chunk_sample_count) and calculate the sample
    // offset address.
    // With large numbers of samples, the cost of many allocations dominates,
    // so it's worth iterating twice to allocate sample_table just once.
    let total_sample_count = sample_to_chunk_iter(&stsc.samples, &stco.offsets)
        .by_ref()
        .map(|(_, sample_counts)| sample_counts.to_usize())
        .sum();
    let mut sample_table = TryVec::with_capacity(total_sample_count).ok()?;
    for i in sample_to_chunk_iter(&stsc.samples, &stco.offsets) {
        let chunk_id = i.0 as usize;
        let sample_counts = i.1;
        // The chunk's base file offset comes from 'stco'.
        let mut cur_position = match stco.offsets.get(chunk_id) {
            Some(&i) => i.into(),
            _ => return None,
        };
        for _ in 0..sample_counts {
            let start_offset = cur_position;
            // Per-sample sizes from 'stsz' take precedence over the constant
            // sample_size field; an end offset of 0 marks an invalid entry
            // and aborts the whole table.
            let end_offset = match (stsz.sample_size, sample_size_iter.next()) {
                (_, Some(t)) => (start_offset + *t)?,
                (t, _) if t > 0 => (start_offset + t)?,
                _ => 0.into(),
            };
            if end_offset == 0 {
                return None;
            }
            cur_position = end_offset;
            sample_table
                .push(Indice {
                    start_offset,
                    end_offset,
                    sync: !has_sync_table,
                    ..Default::default()
                })
                .ok()?;
        }
    }
    // Mark the sync sample in sample_table according to 'stss'.
    // 'stss' sample numbers are 1-based; an out-of-range entry invalidates
    // the table.
    if let Some(ref v) = track.stss {
        for iter in &v.samples {
            match iter
                .checked_sub(&1)
                .and_then(|idx| sample_table.get_mut(idx as usize))
            {
                Some(elem) => elem.sync = true,
                _ => return None,
            }
        }
    }
    let ctts_iter = track.ctts.as_ref().map(|v| v.samples.as_slice().iter());
    let mut ctts_offset_iter = TimeOffsetIterator {
        cur_sample_range: (0..0),
        cur_offset: 0,
        ctts_iter,
        track_id: track.id,
    };
    let mut stts_iter = TimeToSampleIterator {
        cur_sample_count: (0..0),
        cur_sample_delta: 0,
        stts_iter: stts.samples.as_slice().iter(),
        track_id: track.id,
    };
    // sum_delta is the sum of stts_iter delta.
    // According to spec:
    // decode time => DT(n) = DT(n-1) + STTS(n)
    // composition time => CT(n) = DT(n) + CTTS(n)
    // Note:
    // composition time needs to add the track offset time from 'elst' table.
    let mut sum_delta = TrackScaledTime::<i64>(0, track.id);
    for sample in sample_table.as_mut_slice() {
        let decode_time = sum_delta;
        sum_delta = (sum_delta + stts_iter.next_delta())?;
        // ctts_offset is the current sample offset time.
        let ctts_offset = ctts_offset_iter.next_offset_time();
        // Times are converted from track timescale units to microseconds.
        let start_composition = track_time_to_us((decode_time + ctts_offset)?, timescale)?.0;
        let end_composition = track_time_to_us((sum_delta + ctts_offset)?, timescale)?.0;
        let start_decode = track_time_to_us(decode_time, timescale)?.0;
        sample.start_composition = (track_offset_time + start_composition)?;
        sample.end_composition = (track_offset_time + end_composition)?;
        sample.start_decode = start_decode.into();
    }
    // Correct composition end time due to 'ctts' causes composition time re-ordering.
    //
    // Composition end time is not in specification. However, gecko needs it, so we need to
    // calculate to correct the composition end time.
    if !sample_table.is_empty() {
        // Create an index table that refers to sample_table, sorted by
        // start_composition time.
        let mut sort_table = TryVec::with_capacity(sample_table.len()).ok()?;
        for i in 0..sample_table.len() {
            sort_table.push(i).ok()?;
        }
        sort_table.sort_by_key(|i| match sample_table.get(*i) {
            Some(v) => v.start_composition,
            _ => 0.into(),
        });
        // Each sample's composition end time is the start time of the next
        // sample in composition order.
        for indices in sort_table.windows(2) {
            if let [current_index, peek_index] = *indices {
                let next_start_composition_time = sample_table[peek_index].start_composition;
                let sample = &mut sample_table[current_index];
                sample.end_composition = next_start_composition_time;
            }
        }
    }
    Some(sample_table)
}
// Convert a 'ctts' compact table to full table by iterator,
// (sample_with_the_same_offset_count, offset) => (offset), (offset), (offset) ...
//
// For example:
// (2, 10), (4, 9) into (10, 10, 9, 9, 9, 9) by calling next_offset_time().
struct TimeOffsetIterator<'a> {
    cur_sample_range: std::ops::Range<u32>, // samples remaining at cur_offset
    cur_offset: i64,                        // offset for the current run
    ctts_iter: Option<std::slice::Iter<'a, TimeOffset>>, // None when the track has no 'ctts'
    track_id: usize,
}
impl<'a> Iterator for TimeOffsetIterator<'a> {
    type Item = i64;
    // The empty range 0..0 is used deliberately as the "run exhausted" state.
    #[allow(clippy::reversed_empty_ranges)]
    fn next(&mut self) -> Option<i64> {
        let has_sample = self.cur_sample_range.next().or_else(|| {
            // At end of current TimeOffset, find the next TimeOffset.
            let iter = match self.ctts_iter {
                Some(ref mut v) => v,
                _ => return None,
            };
            let offset_version;
            self.cur_sample_range = match iter.next() {
                Some(v) => {
                    offset_version = v.time_offset;
                    0..v.sample_count
                }
                _ => {
                    // 'ctts' exhausted: set up an empty run with a zero offset.
                    offset_version = TimeOffsetVersion::Version0(0);
                    0..0
                }
            };
            // Widen the stored offset to i64 regardless of version.
            self.cur_offset = match offset_version {
                TimeOffsetVersion::Version0(i) => i64::from(i),
                TimeOffsetVersion::Version1(i) => i64::from(i),
            };
            self.cur_sample_range.next()
        });
        has_sample.and(Some(self.cur_offset))
    }
}
impl<'a> TimeOffsetIterator<'a> {
    /// Yield the next composition-time offset as a track-scaled time,
    /// falling back to zero once the table is exhausted.
    fn next_offset_time(&mut self) -> TrackScaledTime<i64> {
        let offset = self.next().unwrap_or(0);
        TrackScaledTime::<i64>(offset, self.track_id)
    }
}
// Convert 'stts' compact table to full table by iterator,
// (sample_count_with_the_same_time, time) => (time, time, time) ... repeats
// sample_count_with_the_same_time.
//
// For example:
// (2, 3000), (1, 2999) to (3000, 3000, 2999).
struct TimeToSampleIterator<'a> {
    cur_sample_count: std::ops::Range<u32>, // samples remaining in the current run
    cur_sample_delta: u32,                  // delta for the current run
    stts_iter: std::slice::Iter<'a, Sample>,
    track_id: usize,
}
impl<'a> Iterator for TimeToSampleIterator<'a> {
    type Item = u32;
    // The empty range 0..0 is used deliberately as the "run exhausted" state.
    #[allow(clippy::reversed_empty_ranges)]
    fn next(&mut self) -> Option<u32> {
        let has_sample = self.cur_sample_count.next().or_else(|| {
            // Current run exhausted; start the next 'stts' entry, if any.
            self.cur_sample_count = match self.stts_iter.next() {
                Some(v) => {
                    self.cur_sample_delta = v.sample_delta;
                    0..v.sample_count
                }
                _ => 0..0,
            };
            self.cur_sample_count.next()
        });
        has_sample.and(Some(self.cur_sample_delta))
    }
}
impl<'a> TimeToSampleIterator<'a> {
    /// Yield the next per-sample delta as a track-scaled time,
    /// falling back to zero once the table is exhausted.
    fn next_delta(&mut self) -> TrackScaledTime<i64> {
        let delta = self.next().map_or(0, i64::from);
        TrackScaledTime::<i64>(delta, self.track_id)
    }
}
// Convert 'stco' compact table to full table by iterator.
// (start_chunk_num, sample_number) => (start_chunk_num, sample_number),
// (start_chunk_num + 1, sample_number),
// (start_chunk_num + 2, sample_number),
// ...
// (next start_chunk_num, next sample_number),
// ...
//
// For example:
// (1, 5), (5, 10), (9, 2) => (1, 5), (2, 5), (3, 5), (4, 5), (5, 10), (6, 10),
// (7, 10), (8, 10), (9, 2)
fn sample_to_chunk_iter<'a>(
    stsc_samples: &'a TryVec<SampleToChunk>,
    stco_offsets: &'a TryVec<u64>,
) -> SampleToChunkIterator<'a> {
    SampleToChunkIterator {
        chunks: (0..0),
        sample_count: 0,
        stsc_peek_iter: stsc_samples.as_slice().iter().peekable(),
        // 'stco' bounds how many chunks can be emitted in total.
        remain_chunk_count: stco_offsets
            .len()
            .try_into()
            .expect("stco.entry_count is u32"),
    }
}
/// Iterator expanding the compact 'stsc' runs into one
/// (chunk_id, samples_per_chunk) pair per chunk.
struct SampleToChunkIterator<'a> {
    chunks: std::ops::Range<u32>, // chunk ids remaining in the current 'stsc' run
    sample_count: u32,            // samples per chunk for the current run
    stsc_peek_iter: std::iter::Peekable<std::slice::Iter<'a, SampleToChunk>>,
    remain_chunk_count: u32, // total chunk number from 'stco'.
}
impl<'a> Iterator for SampleToChunkIterator<'a> {
    type Item = (u32, u32);
    fn next(&mut self) -> Option<(u32, u32)> {
        let has_chunk = self.chunks.next().or_else(|| {
            // Current run exhausted; expand the next 'stsc' entry.
            self.chunks = self.locate();
            // Subtract this run's length from the chunks remaining per
            // 'stco'; underflow means the tables disagree and iteration
            // stops.
            self.remain_chunk_count
                .checked_sub(
                    self.chunks
                        .len()
                        .try_into()
                        .expect("len() of a Range<u32> must fit in u32"),
                )
                .and_then(|res| {
                    self.remain_chunk_count = res;
                    self.chunks.next()
                })
        });
        has_chunk.map(|id| (id, self.sample_count))
    }
}
impl<'a> SampleToChunkIterator<'a> {
    /// Find the zero-based chunk-id range covered by the next valid 'stsc'
    /// entry, setting `self.sample_count` for that run. Returns an empty
    /// range when the table is exhausted or an entry is malformed.
    #[allow(clippy::reversed_empty_ranges)]
    fn locate(&mut self) -> std::ops::Range<u32> {
        loop {
            return match (self.stsc_peek_iter.next(), self.stsc_peek_iter.peek()) {
                (Some(next), Some(peek)) if next.first_chunk == peek.first_chunk => {
                    // Invalid entry, skip it and will continue searching at
                    // next loop iteration.
                    continue;
                }
                (Some(next), Some(peek)) if next.first_chunk > 0 && peek.first_chunk > 0 => {
                    self.sample_count = next.samples_per_chunk;
                    // first_chunk is 1-based in the file format.
                    (next.first_chunk - 1)..(peek.first_chunk - 1)
                }
                (Some(next), None) if next.first_chunk > 0 => {
                    self.sample_count = next.samples_per_chunk;
                    // Total chunk number in 'stsc' could be different to 'stco',
                    // there could be more chunks at the last 'stsc' record.
                    match next.first_chunk.checked_add(self.remain_chunk_count) {
                        Some(r) => (next.first_chunk - 1)..r - 1,
                        _ => 0..0,
                    }
                }
                _ => 0..0,
            };
        }
    }
}
/// Calculate numerator * scale / denominator, if possible.
///
/// To keep headroom while preserving precision, the quotient and the
/// remainder of n / d are scaled separately and then recombined:
/// (n * s) / d == floor(n / d) * s + (n % d) * s / d.
///
/// Return None on overflow or if the denominator is zero.
fn rational_scale<T, S>(numerator: T, denominator: T, scale2: S) -> Option<T>
where
    T: PrimInt + Zero,
    S: PrimInt,
{
    if denominator.is_zero() {
        return None;
    }
    let quotient = numerator / denominator;
    let remainder = numerator % denominator;
    // The scale must be representable in T for the computation to proceed.
    let scale: T = num_traits::cast(scale2)?;
    let scaled_quotient = quotient.checked_mul(&scale)?;
    let scaled_remainder = remainder.checked_mul(&scale)?;
    (scaled_remainder / denominator).checked_add(&scaled_quotient)
}
/// A time value expressed in microseconds.
#[derive(Debug, PartialEq)]
pub struct Microseconds<T>(pub T);
/// Convert `time` in media's global (mvhd) timescale to microseconds,
/// using provided `MediaTimeScale`
///
/// Returns `None` on overflow or if the scale is zero (see `rational_scale`).
pub fn media_time_to_us(time: MediaScaledTime, scale: MediaTimeScale) -> Option<Microseconds<u64>> {
    let microseconds_per_second = 1_000_000;
    rational_scale(time.0, scale.0, microseconds_per_second).map(Microseconds)
}
/// Convert `time` in track's local (mdhd) timescale to microseconds,
/// using provided `TrackTimeScale<T>`
///
/// Returns `None` on overflow or if the scale is zero (see `rational_scale`).
///
/// # Panics
///
/// Asserts that `time` and `scale` carry the same track id.
pub fn track_time_to_us<T>(
    time: TrackScaledTime<T>,
    scale: TrackTimeScale<T>,
) -> Option<Microseconds<T>>
where
    T: PrimInt + Zero,
{
    assert_eq!(time.1, scale.1);
    let microseconds_per_second = 1_000_000;
    rational_scale(time.0, scale.0, microseconds_per_second).map(Microseconds)
}
// Verifies rational_scale's split quotient/remainder computation and its
// overflow behavior near the top of the u64 range.
#[test]
fn rational_scale_overflow() {
    assert_eq!(rational_scale::<u64, u64>(17, 3, 1000), Some(5666));
    let large = 0x4000_0000_0000_0000; // 2^62
    assert_eq!(rational_scale::<u64, u64>(large, 2, 2), Some(large));
    assert_eq!(rational_scale::<u64, u64>(large, 4, 4), Some(large));
    // 2^62 / 2 * 8 == 2^64 overflows u64.
    assert_eq!(rational_scale::<u64, u64>(large, 2, 8), None);
    assert_eq!(rational_scale::<u64, u64>(large, 8, 4), Some(large / 2));
    assert_eq!(rational_scale::<u64, u64>(large + 1, 4, 4), Some(large + 1));
    // The scaled quotient alone exceeds u64 here.
    assert_eq!(rational_scale::<u64, u64>(large, 40, 1000), None);
}
// A naive (time * 1_000_000) / scale computation would overflow u64 for
// these values; rational_scale's split computation must not.
#[test]
fn media_time_overflow() {
    let scale = MediaTimeScale(90000);
    let duration = MediaScaledTime(9_007_199_254_710_000);
    assert_eq!(
        media_time_to_us(duration, scale),
        Some(Microseconds(100_079_991_719_000_000u64))
    );
}
// A naive (time * 1_000_000) / scale computation would overflow u64 for
// these values; rational_scale's split computation must not.
#[test]
fn track_time_overflow() {
    let scale = TrackTimeScale(44100u64, 0);
    let duration = TrackScaledTime(4_413_527_634_807_900u64, 0);
    assert_eq!(
        track_time_to_us(duration, scale),
        Some(Microseconds(100_079_991_719_000_000u64))
    );
}

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

View File

@ -5,9 +5,10 @@
extern crate mp4parse as mp4;
use mp4::Error;
use mp4::ParseStrictness;
use std::convert::TryInto;
use std::fs::File;
use std::io::{Cursor, Read};
use std::io::{Cursor, Read, Seek, SeekFrom};
use std::path::Path;
static MINI_MP4: &str = "tests/minimal.mp4";
@ -32,18 +33,38 @@ static VIDEO_AV1_MP4: &str = "tests/tiny_av1.mp4";
// https://bugzilla.mozilla.org/show_bug.cgi?id=1687357 for more information.
static VIDEO_INVALID_USERDATA: &str = "tests/invalid_userdata.mp4";
static IMAGE_AVIF: &str = "av1-avif/testFiles/Microsoft/Monochrome.avif";
static IMAGE_AVIF_EXTENTS: &str = "tests/kodim-extents.avif";
static IMAGE_AVIF_EXTENTS: &str = "tests/multiple-extents.avif";
static IMAGE_AVIF_ALPHA: &str = "tests/bug-1661347.avif";
static IMAGE_AVIF_CORRUPT: &str = "tests/corrupt/bug-1655846.avif";
static IMAGE_AVIF_CORRUPT_2: &str = "tests/corrupt/bug-1661347.avif";
static IMAGE_AVIF_CORRUPT_3: &str = "tests/corrupt/bad-ipma-version.avif";
static IMAGE_AVIF_CORRUPT_4: &str = "tests/corrupt/bad-ipma-flags.avif";
static IMAGE_AVIF_IPMA_BAD_VERSION: &str = "tests/corrupt/bad-ipma-version.avif";
static IMAGE_AVIF_IPMA_BAD_FLAGS: &str = "tests/corrupt/bad-ipma-flags.avif";
static IMAGE_AVIF_IPMA_DUPLICATE_VERSION_AND_FLAGS: &str =
"tests/corrupt/ipma-duplicate-version-and-flags.avif";
// Fix: these two paths were previously swapped relative to their names
// (NO_HDLR pointed at hdlr-not-first.avif and vice versa).
static IMAGE_AVIF_NO_HDLR: &str = "tests/corrupt/no-hdlr.avif";
static IMAGE_AVIF_HDLR_NOT_FIRST: &str = "tests/corrupt/hdlr-not-first.avif";
static IMAGE_AVIF_HDLR_NOT_PICT: &str = "tests/corrupt/hdlr-not-pict.avif";
static IMAGE_AVIF_NO_MIF1: &str = "tests/corrupt/no-mif1.avif";
static IMAGE_AVIF_NO_PIXI: &str = "tests/corrupt/no-pixi.avif";
static IMAGE_AVIF_NO_ALPHA_PIXI: &str = "tests/corrupt/no-pixi-for-alpha.avif";
static IMAGE_AVIF_AV1C_MISSING_ESSENTIAL: &str = "tests/corrupt/av1C-missing-essential.avif";
static IMAGE_AVIF_CLAP_MISSING_ESSENTIAL: &str = "tests/corrupt/clap-missing-essential.avif";
static IMAGE_AVIF_IMIR_MISSING_ESSENTIAL: &str = "tests/corrupt/imir-missing-essential.avif";
static IMAGE_AVIF_IROT_MISSING_ESSENTIAL: &str = "tests/corrupt/irot-missing-essential.avif";
static IMAGE_AVIF_GRID: &str = "av1-avif/testFiles/Microsoft/Summer_in_Tomsk_720p_5x4_grid.avif";
static AVIF_TEST_DIRS: &[&str] = &["tests", "av1-avif/testFiles"];
static AVIF_CORRUPT_IMAGES: &str = "tests/corrupt";
// The 1 frame h263 3gp file can be generated by ffmpeg with command
// "ffmpeg -i [input file] -f 3gp -vcodec h263 -vf scale=176x144 -frames:v 1 -an output.3gp"
static VIDEO_H263_3GP: &str = "tests/bbb_sunflower_QCIF_30fps_h263_noaudio_1f.3gp";
// The 1 frame AMR-NB 3gp file can be generated by ffmpeg with command
// "ffmpeg -i [input file] -f 3gp -acodec amr_nb -ar 8000 -ac 1 -frames:a 1 -vn output.3gp"
#[cfg(feature = "3gpp")]
static AUDIO_AMRNB_3GP: &str = "tests/amr_nb_1f.3gp";
// The 1 frame AMR-WB 3gp file can be generated by ffmpeg with command
// "ffmpeg -i [input file] -f 3gp -acodec amr_wb -ar 16000 -ac 1 -frames:a 1 -vn output.3gp"
#[cfg(feature = "3gpp")]
static AUDIO_AMRWB_3GP: &str = "tests/amr_wb_1f.3gp";
// Adapted from https://github.com/GuillaumeGomez/audio-video-metadata/blob/9dff40f565af71d5502e03a2e78ae63df95cfd40/src/metadata.rs#L53
#[test]
@ -157,6 +178,10 @@ fn public_api() {
mp4::AudioCodecSpecific::LPCM => {
"LPCM"
}
#[cfg(feature = "3gpp")]
mp4::AudioCodecSpecific::AMRSpecificBox(_) => {
"AMR"
}
},
"ES"
);
@ -683,7 +708,7 @@ fn public_mp4_bug_1185230() {
#[test]
fn public_avif_primary_item() {
let input = &mut File::open(IMAGE_AVIF).expect("Unknown file");
let context = mp4::read_avif(input).expect("read_avif failed");
let context = mp4::read_avif(input, ParseStrictness::Normal).expect("read_avif failed");
assert_eq!(context.primary_item().len(), 6979);
assert_eq!(context.primary_item()[0..4], [0x12, 0x00, 0x0a, 0x0a]);
}
@ -691,14 +716,14 @@ fn public_avif_primary_item() {
#[test]
fn public_avif_primary_item_split_extents() {
let input = &mut File::open(IMAGE_AVIF_EXTENTS).expect("Unknown file");
let context = mp4::read_avif(input).expect("read_avif failed");
assert_eq!(context.primary_item().len(), 4387);
let context = mp4::read_avif(input, ParseStrictness::Normal).expect("read_avif failed");
assert_eq!(context.primary_item().len(), 52);
}
#[test]
fn public_avif_alpha_item() {
let input = &mut File::open(IMAGE_AVIF_ALPHA).expect("Unknown file");
let context = mp4::read_avif(input).expect("read_avif failed");
let context = mp4::read_avif(input, ParseStrictness::Normal).expect("read_avif failed");
assert!(context.alpha_item().is_some());
assert!(!context.premultiplied_alpha);
}
@ -706,49 +731,139 @@ fn public_avif_alpha_item() {
#[test]
fn public_avif_bug_1655846() {
let input = &mut File::open(IMAGE_AVIF_CORRUPT).expect("Unknown file");
assert!(mp4::read_avif(input).is_err());
assert!(mp4::read_avif(input, ParseStrictness::Normal).is_err());
}
#[test]
fn public_avif_bug_1661347() {
let input = &mut File::open(IMAGE_AVIF_CORRUPT_2).expect("Unknown file");
assert!(mp4::read_avif(input).is_err());
assert!(mp4::read_avif(input, ParseStrictness::Normal).is_err());
}
fn assert_invalid_data<T: std::fmt::Debug>(result: mp4::Result<T>, expected_msg: &str) {
match result {
Err(Error::InvalidData(msg)) if msg == expected_msg => {}
Err(Error::InvalidData(msg)) if msg != expected_msg => {
panic!(
"Error message mismtatch\nExpected: {}\nFound: {}",
expected_msg, msg
);
}
r => panic!(
"Expected Err(Error::InvalidData({:?})), found {:?}",
"Expected Err(Error::InvalidData({:?}), found {:?}",
expected_msg, r
),
}
}
#[test]
fn public_avif_bad_ipma_version() {
let input = &mut File::open(IMAGE_AVIF_CORRUPT_3).expect("Unknown file");
let expected_msg = "The version 0 should be used unless 32-bit item_ID values are needed";
assert_invalid_data(mp4::read_avif(input), expected_msg);
/// Check that input generates the expected error only in strict parsing mode
fn assert_avif_should(path: &str, expected_msg: &str) {
let input = &mut File::open(path).expect("Unknown file");
assert_invalid_data(mp4::read_avif(input, ParseStrictness::Strict), expected_msg);
input.seek(SeekFrom::Start(0)).expect("rewind failed");
mp4::read_avif(input, ParseStrictness::Normal).expect("ParseStrictness::Normal failed");
input.seek(SeekFrom::Start(0)).expect("rewind failed");
mp4::read_avif(input, ParseStrictness::Permissive).expect("ParseStrictness::Permissive failed");
}
/// Check that input generates the expected error unless in permissive parsing mode
fn assert_avif_shall(path: &str, expected_msg: &str) {
    let input = &mut File::open(path).expect("Unknown file");
    assert_invalid_data(mp4::read_avif(input, ParseStrictness::Strict), expected_msg);
    input.seek(SeekFrom::Start(0)).expect("rewind failed");
    assert_invalid_data(mp4::read_avif(input, ParseStrictness::Normal), expected_msg);
    input.seek(SeekFrom::Start(0)).expect("rewind failed");
    mp4::read_avif(input, ParseStrictness::Permissive).expect("ParseStrictness::Permissive failed");
}
#[test]
fn public_avif_bad_ipma_flags() {
let input = &mut File::open(IMAGE_AVIF_CORRUPT_4).expect("Unknown file");
let expected_msg = "flags should be equal to 0 unless there are more than 127 properties in the ItemPropertyContainerBox";
assert_invalid_data(mp4::read_avif(input), expected_msg);
fn public_avif_ipma_missing_essential() {
let expected_msg = "All transformative properties associated with \
coded and derived images required or conditionally \
required by this document shall be marked as essential \
per MIAF (ISO 23000-22:2019) § 7.3.9";
assert_avif_should(IMAGE_AVIF_AV1C_MISSING_ESSENTIAL, expected_msg);
assert_avif_should(IMAGE_AVIF_CLAP_MISSING_ESSENTIAL, expected_msg);
assert_avif_should(IMAGE_AVIF_IMIR_MISSING_ESSENTIAL, expected_msg);
assert_avif_should(IMAGE_AVIF_IROT_MISSING_ESSENTIAL, expected_msg);
}
#[test]
fn public_avif_ipma_bad_version() {
let expected_msg = "The ipma version 0 should be used unless 32-bit \
item_ID values are needed \
per HEIF (ISO 23008-12:2017) § 9.3.1";
assert_avif_should(IMAGE_AVIF_IPMA_BAD_VERSION, expected_msg);
}
#[test]
fn public_avif_ipma_bad_flags() {
let expected_msg = "Unless there are more than 127 properties in the \
ItemPropertyContainerBox, flags should be equal to 0 \
per HEIF (ISO 23008-12:2017) § 9.3.1";
assert_avif_should(IMAGE_AVIF_IPMA_BAD_FLAGS, expected_msg);
}
#[test]
fn public_avif_ipma_duplicate_version_and_flags() {
let expected_msg = "There shall be at most one ItemPropertyAssociationbox \
with a given pair of values of version and flags \
per HEIF (ISO 23008-12:2017) § 9.3.1";
assert_avif_shall(IMAGE_AVIF_IPMA_DUPLICATE_VERSION_AND_FLAGS, expected_msg);
}
#[test]
fn public_avif_hdlr_first_in_meta() {
let expected_msg = "The HandlerBox shall be the first contained box within \
the MetaBox \
per MIAF (ISO 23000-22:2019) § 7.2.1.5";
assert_avif_shall(IMAGE_AVIF_NO_HDLR, expected_msg);
assert_avif_shall(IMAGE_AVIF_HDLR_NOT_FIRST, expected_msg);
}
#[test]
fn public_avif_hdlr_is_pict() {
let expected_msg = "The HandlerBox handler_type must be 'pict' \
per MIAF (ISO 23000-22:2019) § 7.2.1.5";
assert_avif_shall(IMAGE_AVIF_HDLR_NOT_PICT, expected_msg);
}
#[test]
fn public_avif_no_mif1() {
let expected_msg = "The FileTypeBox should contain 'mif1' in the compatible_brands list \
per MIAF (ISO 23000-22:2019) § 7.2.1.2";
assert_avif_should(IMAGE_AVIF_NO_MIF1, expected_msg);
}
#[test]
fn public_avif_pixi_present_for_displayable_images() {
let expected_msg = "The pixel information property shall be associated with every image \
that is displayable (not hidden) \
per MIAF (ISO/IEC 23000-22:2019) specification § 7.3.6.6";
assert_avif_shall(IMAGE_AVIF_NO_PIXI, expected_msg);
assert_avif_shall(IMAGE_AVIF_NO_ALPHA_PIXI, expected_msg);
}
#[test]
#[ignore] // Remove when we add support; see https://github.com/mozilla/mp4parse-rust/issues/198
fn public_avif_primary_item_is_grid() {
    let input = &mut File::open(IMAGE_AVIF_GRID).expect("Unknown file");
    // Single parse with the current two-argument API. The stale one-argument
    // `mp4::read_avif(input)` call that was interleaved here has been removed:
    // it no longer matches the signature (read_avif now takes a
    // ParseStrictness) and would also have consumed the reader before the
    // real parse.
    mp4::read_avif(input, ParseStrictness::Normal).expect("read_avif failed");
    // Add some additional checks
}
#[test]
fn public_avif_read_samples() {
    // Parse every AVIF fixture found under AVIF_TEST_DIRS using the default
    // (Normal) strictness.
    public_avif_read_samples_impl(ParseStrictness::Normal);
}
#[test]
#[ignore] // See https://github.com/AOMediaCodec/av1-avif/issues/146
fn public_avif_read_samples_strict() {
    // Same walk as public_avif_read_samples, but with Strict parsing;
    // currently ignored (see the issue linked above).
    public_avif_read_samples_impl(ParseStrictness::Strict);
}
// Walk each directory in AVIF_TEST_DIRS and parse every selected file with
// the given strictness, panicking on any I/O or parse failure.
fn public_avif_read_samples_impl(strictness: ParseStrictness) {
    for dir in AVIF_TEST_DIRS {
        for entry in walkdir::WalkDir::new(dir) {
            let entry = entry.expect("AVIF entry");
// NOTE(review): the line below is diff-context residue; the statements that
// derive `path` from `entry` (and skip non-matching entries) are elided here.
@ -767,7 +882,7 @@ fn public_avif_read_samples() {
            }
            println!("parsing {:?}", path);
            let input = &mut File::open(path).expect("Unknow file");
// NOTE(review): the next line is the old single-argument call shown as
// removed by the diff; only the two-argument call that follows is current.
            mp4::read_avif(input).expect("read_avif failed");
            mp4::read_avif(input, strictness).expect("read_avif failed");
        }
    }
}
@ -797,3 +912,53 @@ fn public_video_h263() {
};
}
}
#[test]
#[cfg(feature = "3gpp")]
fn public_audio_amrnb() {
    // Parse the AMR-NB 3GP fixture and confirm every track advertises the
    // AMRNB codec with an AMRSpecificBox in its sample description.
    let mut file = File::open(AUDIO_AMRNB_3GP).expect("Unknown file");
    let mut contents = Vec::new();
    file.read_to_end(&mut contents).expect("File error");
    let mut cursor = Cursor::new(&contents);
    let context = mp4::read_mp4(&mut cursor).expect("read_mp4 failed");
    for track in context.tracks {
        let stsd = track.stsd.expect("expected an stsd");
        let audio = match stsd.descriptions.first().expect("expected a SampleEntry") {
            mp4::SampleEntry::Audio(ref v) => v,
            _ => panic!("expected a AudioSampleEntry"),
        };
        assert!(audio.codec_type == mp4::CodecType::AMRNB);
        match audio.codec_specific {
            mp4::AudioCodecSpecific::AMRSpecificBox(_) => (),
            _ => panic!("expected a AMRSpecificBox",),
        }
    }
}
#[test]
#[cfg(feature = "3gpp")]
fn public_audio_amrwb() {
    // Parse the AMR-WB 3GP fixture and confirm every track advertises the
    // AMRWB codec with an AMRSpecificBox in its sample description.
    let mut file = File::open(AUDIO_AMRWB_3GP).expect("Unknown file");
    let mut contents = Vec::new();
    file.read_to_end(&mut contents).expect("File error");
    let mut cursor = Cursor::new(&contents);
    let context = mp4::read_mp4(&mut cursor).expect("read_mp4 failed");
    for track in context.tracks {
        let stsd = track.stsd.expect("expected an stsd");
        let audio = match stsd.descriptions.first().expect("expected a SampleEntry") {
            mp4::SampleEntry::Audio(ref v) => v,
            _ => panic!("expected a AudioSampleEntry"),
        };
        assert!(audio.codec_type == mp4::CodecType::AMRWB);
        match audio.codec_specific {
            mp4::AudioCodecSpecific::AMRSpecificBox(_) => (),
            _ => panic!("expected a AMRSpecificBox",),
        }
    }
}

View File

@ -1 +1 @@
{"files":{"Cargo.toml":"9f188cfbdda3daf4dc36638f136f3640c21cad5eef8df60f66826f06cf2cc0e0","cbindgen.toml":"5c9429f271d6e914d81b63e6509c04ffe84cab11ed3a53a2ed4715e5d5ace80e","examples/dump.rs":"04db9535bcdd36b0d1371a6b99d573128bf457444a41b06a8acb8b5402127c1e","src/lib.rs":"6bae865477eae9b12300f53627844fa7d63bf50b94858a378f3622f7cc753c4e","tests/test_chunk_out_of_range.rs":"b5da583218d98027ed973a29c67434a91a1306f2d2fb39ec4d640d4824c308ce","tests/test_encryption.rs":"77c71a954ca3f54120b852641dc699994d6024aa13a955450fa4cf913a88b7b4","tests/test_fragment.rs":"e90eb5a4418d30002655466c0c4b3125c7fd70a74b6871471eaa172f1def9db8","tests/test_rotation.rs":"fb43c2f2dfa496d151c33bdd46c0fd3252387c23cc71e2cac9ed0234de715a81","tests/test_sample_table.rs":"185755909b2f4e0ea99604bb423a07623d614a180accdaebd1c98aef2c2e3ae6","tests/test_workaround_stsc.rs":"7dd419f3d55b9a3a039cac57e58a9240a9c8166bcd4356c24f69f731c3ced83b"},"package":null}
{"files":{"Cargo.toml":"f664f599ce265f10355fe2cd6e4fbb24eb00e53a02b6a84f6a286d9806a2758d","cbindgen.toml":"f596433a2a736c0170b85b9eb4627ce4e5620bc9d6b221d9f45d2be12ca5e42a","examples/dump.rs":"aa0f8cd1bd7cfda2ec9dfdb72255f3a3c013c9c5d13e84f27c58ac8a42b147b9","src/lib.rs":"44cc6a3e200d70ad03e100fa6efb5d89d3723436dd8bf7726164c72ba09145c2","tests/test_chunk_out_of_range.rs":"b5da583218d98027ed973a29c67434a91a1306f2d2fb39ec4d640d4824c308ce","tests/test_encryption.rs":"b918f0f10e7708bff5fae4becf1d09a188db25d874d0919d509b90266504eed7","tests/test_fragment.rs":"e90eb5a4418d30002655466c0c4b3125c7fd70a74b6871471eaa172f1def9db8","tests/test_rotation.rs":"fb43c2f2dfa496d151c33bdd46c0fd3252387c23cc71e2cac9ed0234de715a81","tests/test_sample_table.rs":"19b8d0b0f7ed79a857329321b49f5a7f687901cadd4cd22bc6728febd919d3ce","tests/test_workaround_stsc.rs":"7dd419f3d55b9a3a039cac57e58a9240a9c8166bcd4356c24f69f731c3ced83b"},"package":null}

View File

@ -27,8 +27,12 @@ travis-ci = { repository = "https://github.com/mozilla/mp4parse-rust" }
byteorder = "1.2.1"
fallible_collections = { version = "0.3", features = ["std_io"] }
log = "0.4"
mp4parse = {version = "0.11.5", path = "../mp4parse"}
mp4parse = { version = "0.11.5", path = "../mp4parse", features = ["unstable-api"] }
num-traits = "0.2.14"
[dev-dependencies]
env_logger = "0.8"
[features]
3gpp = ["mp4parse/3gpp"]
meta-xml = ["mp4parse/meta-xml"]

View File

@ -15,3 +15,17 @@ cpp_compat = true
[enum]
rename_variants = "QualifiedScreamingSnakeCase"
[defines]
"feature = 3gpp" = "MP4PARSE_FEATURE_3GPP"
"feature = meta-xml" = "MP4PARSE_FEATURE_META_XML"
[parse]
parse_deps = true
include = ["mp4parse"]
[export.rename]
# We need to declare this type in mp4parse, but we rename it in the generated
# header to match mp4parse_capi naming conventions
"ParseStrictness" = "Mp4parseStrictness"
"Indice" = "Mp4parseIndice"

View File

@ -6,6 +6,7 @@ extern crate log;
extern crate env_logger;
use mp4parse::ParseStrictness;
use mp4parse_capi::*;
use std::env;
use std::fs::File;
@ -20,7 +21,7 @@ extern "C" fn buf_read(buf: *mut u8, size: usize, userdata: *mut std::os::raw::c
}
}
fn dump_avif(filename: &str) {
fn dump_avif(filename: &str, strictness: ParseStrictness) {
let mut file = File::open(filename).expect("Unknown file");
let io = Mp4parseIo {
read: Some(buf_read),
@ -29,12 +30,12 @@ fn dump_avif(filename: &str) {
unsafe {
let mut parser = std::ptr::null_mut();
let rv = mp4parse_avif_new(&io, &mut parser);
let rv = mp4parse_avif_new(&io, strictness, &mut parser);
println!("mp4parse_avif_new -> {:?}", rv);
}
}
fn dump_file(filename: &str) {
fn dump_file(filename: &str, strictness: ParseStrictness) {
let mut file = File::open(filename).expect("Unknown file");
let io = Mp4parseIo {
read: Some(buf_read),
@ -49,7 +50,7 @@ fn dump_file(filename: &str) {
Mp4parseStatus::Ok => (),
Mp4parseStatus::Invalid => {
println!("-- failed to parse as mp4 video, trying AVIF");
dump_avif(filename);
dump_avif(filename, strictness);
}
_ => {
println!("-- fail to parse: {:?}, '-v' for more info", rv);
@ -158,17 +159,31 @@ fn dump_file(filename: &str) {
}
fn main() {
let args: Vec<String> = env::args().collect();
if args.len() < 2 {
let mut dump_func: fn(&str, ParseStrictness) = dump_file;
let mut verbose = false;
let mut strictness = ParseStrictness::Normal;
let mut filenames = Vec::new();
for arg in env::args().skip(1) {
match arg.as_str() {
"--avif" => dump_func = dump_avif,
"--strict" => strictness = ParseStrictness::Strict,
"--permissive" => strictness = ParseStrictness::Permissive,
"-v" | "--verbose" => verbose = true,
_ => {
if let Some("-") = arg.get(0..1) {
eprintln!("Ignoring unknown switch {:?}", arg);
} else {
filenames.push(arg)
}
}
}
}
if filenames.is_empty() {
eprintln!("No files to dump, exiting...");
return;
}
// Initialize logging, setting the log level if requested.
let (skip, verbose) = if args[1] == "-v" {
(2, true)
} else {
(1, false)
};
let env = env_logger::Env::default();
let mut logger = env_logger::Builder::from_env(env);
if verbose {
@ -176,9 +191,9 @@ fn main() {
}
logger.init();
for filename in args.iter().skip(skip) {
for filename in filenames {
info!("-- dump of '{}' --", filename);
dump_file(filename);
dump_func(filename.as_str(), strictness);
info!("-- end of '{}' --", filename);
}
}

View File

@ -41,29 +41,24 @@ extern crate mp4parse;
extern crate num_traits;
use byteorder::WriteBytesExt;
use num_traits::{CheckedAdd, CheckedSub};
use num_traits::{PrimInt, Zero};
use std::convert::TryFrom;
use std::convert::TryInto;
use std::io::Read;
use std::ops::Neg;
use std::ops::{Add, Sub};
// Symbols we need from our rust api.
use mp4parse::serialize_opus_header;
use mp4parse::unstable::{
create_sample_table, media_time_to_us, track_time_to_us, CheckedInteger, Indice, Microseconds,
};
use mp4parse::AudioCodecSpecific;
use mp4parse::AvifContext;
use mp4parse::CodecType;
use mp4parse::Error;
use mp4parse::MediaContext;
use mp4parse::MediaScaledTime;
use mp4parse::MediaTimeScale;
// Re-exported so consumers don't have to depend on mp4parse as well
pub use mp4parse::ParseStrictness;
use mp4parse::SampleEntry;
use mp4parse::ToUsize;
use mp4parse::Track;
use mp4parse::TrackScaledTime;
use mp4parse::TrackTimeScale;
use mp4parse::TrackType;
use mp4parse::TryBox;
use mp4parse::TryHashMap;
@ -124,6 +119,10 @@ pub enum Mp4parseCodec {
Ec3,
Alac,
H263,
#[cfg(feature = "3gpp")]
AMRNB,
#[cfg(feature = "3gpp")]
AMRWB,
}
impl Default for Mp4parseCodec {
@ -151,102 +150,6 @@ impl Default for Mp4ParseEncryptionSchemeType {
}
}
/// A zero-overhead wrapper around integer types for the sake of always
/// requiring checked arithmetic
#[repr(transparent)]
#[derive(Debug, Default, Copy, Clone, PartialEq, Eq, PartialOrd, Ord)]
pub struct CheckedInteger<T>(pub T);
// Wrapping a raw integer is always valid; the newtype only gates arithmetic.
impl<T> From<T> for CheckedInteger<T> {
    fn from(i: T) -> Self {
        Self(i)
    }
}
// Orphan rules prevent a more general implementation, but this suffices
// to unwrap a checked i64 back to its raw value.
impl From<CheckedInteger<i64>> for i64 {
    fn from(checked: CheckedInteger<i64>) -> i64 {
        checked.0
    }
}
impl<T, U: Into<T>> Add<U> for CheckedInteger<T>
where
    T: CheckedAdd,
{
    type Output = Option<Self>;

    /// Overflow-checked addition: `None` instead of wrapping.
    fn add(self, other: U) -> Self::Output {
        match self.0.checked_add(&other.into()) {
            Some(sum) => Some(Self(sum)),
            None => None,
        }
    }
}
impl<T, U: Into<T>> Sub<U> for CheckedInteger<T>
where
    T: CheckedSub,
{
    type Output = Option<Self>;

    /// Underflow-checked subtraction: `None` instead of wrapping.
    fn sub(self, other: U) -> Self::Output {
        self.0.checked_sub(&other.into()).map(Self)
    }
}
/// Implement subtraction of checked `u64`s returning i64
// This is necessary for handling Mp4parseTrackInfo::media_time gracefully
impl Sub for CheckedInteger<u64> {
type Output = Option<CheckedInteger<i64>>;
fn sub(self, other: Self) -> Self::Output {
if self >= other {
self.0
.checked_sub(other.0)
.and_then(|u| i64::try_from(u).ok())
.map(CheckedInteger)
} else {
other
.0
.checked_sub(self.0)
.and_then(|u| i64::try_from(u).ok())
.map(i64::neg)
.map(CheckedInteger)
}
}
}
#[test]
fn u64_subtraction_returning_i64() {
    // (lhs, rhs, expected lhs - rhs)
    let cases: &[(u64, u64, Option<i64>)] = &[
        (2, 1, Some(1)),      // self > other
        (1, 1, Some(0)),      // self == other
        (u64::MAX, 1, None),  // difference too large to store in i64
        (1, 2, Some(-1)),     // self < other
        (1, u64::MAX, None),  // difference not representable due to overflow
    ];
    for &(lhs, rhs, expected) in cases {
        assert_eq!(
            CheckedInteger(lhs) - CheckedInteger(rhs),
            expected.map(CheckedInteger)
        );
    }
}
// Allow comparing a CheckedInteger<T> directly against a bare T
// (e.g. `checked == 0`) without unwrapping the inner value.
impl<T: std::cmp::PartialEq> PartialEq<T> for CheckedInteger<T> {
    fn eq(&self, other: &T) -> bool {
        self.0 == *other
    }
}
#[repr(C)]
#[derive(Default, Debug)]
pub struct Mp4parseTrackInfo {
@ -259,39 +162,13 @@ pub struct Mp4parseTrackInfo {
// impl Sub for CheckedInteger<u64>
}
/// Per-sample index entry: the sample's byte extent in the file plus its
/// decode/composition timing in microseconds and sync (keyframe) flag.
#[repr(C)]
#[derive(Default, Debug, PartialEq)]
pub struct Mp4parseIndice {
    /// The byte offset in the file where the indexed sample begins.
    pub start_offset: CheckedInteger<u64>,
    /// The byte offset in the file where the indexed sample ends. This is
    /// equivalent to `start_offset` + the length in bytes of the indexed
    /// sample. Typically this will be the `start_offset` of the next sample
    /// in the file.
    pub end_offset: CheckedInteger<u64>,
    /// The time in microseconds when the indexed sample should be displayed.
    /// Analogous to the concept of presentation time stamp (pts).
    pub start_composition: CheckedInteger<i64>,
    /// The time in microseconds when the indexed sample should stop being
    /// displayed. Typically this would be the `start_composition` time of the
    /// next sample if samples were ordered by composition time.
    pub end_composition: CheckedInteger<i64>,
    /// The time in microseconds that the indexed sample should be decoded at.
    /// Analogous to the concept of decode time stamp (dts).
    pub start_decode: CheckedInteger<i64>,
    /// Set if the indexed sample is a sync sample. The meaning of sync is
    /// somewhat codec specific, but essentially amounts to if the sample is a
    /// key frame.
    pub sync: bool,
}
#[repr(C)]
#[derive(Debug)]
pub struct Mp4parseByteData {
pub length: u32,
pub length: usize,
// cheddar can't handle generic type, so it needs to be multiple data types here.
pub data: *const u8,
pub indices: *const Mp4parseIndice,
pub indices: *const Indice,
}
impl Default for Mp4parseByteData {
@ -306,12 +183,12 @@ impl Default for Mp4parseByteData {
impl Mp4parseByteData {
fn set_data(&mut self, data: &[u8]) {
self.length = data.len() as u32;
self.length = data.len();
self.data = data.as_ptr();
}
fn set_indices(&mut self, data: &[Mp4parseIndice]) {
self.length = data.len() as u32;
fn set_indices(&mut self, data: &[Indice]) {
self.length = data.len();
self.indices = data.as_ptr();
}
}
@ -324,12 +201,12 @@ pub struct Mp4parsePsshInfo {
/// Optional four-byte code (fourcc) in an FFI-representable form, used for
/// `Mp4parseSinfInfo::original_format`.
// The diff interleaved the old `OptionalFourCC` and renamed `OptionalFourCc`
// declarations (duplicate enum and impl headers) and its closing brace fell
// into elided context; this is the resolved, renamed definition.
#[repr(u8)]
#[derive(Debug, PartialEq)]
pub enum OptionalFourCc {
    None,
    Some([u8; 4]),
}

impl Default for OptionalFourCc {
    fn default() -> Self {
        Self::None
    }
}
@ -338,7 +215,7 @@ impl Default for OptionalFourCC {
#[repr(C)]
#[derive(Default, Debug)]
pub struct Mp4parseSinfInfo {
pub original_format: OptionalFourCC,
pub original_format: OptionalFourCc,
pub scheme_type: Mp4ParseEncryptionSchemeType,
pub is_encrypted: u8,
pub iv_size: u8,
@ -426,7 +303,7 @@ pub struct Mp4parseParser {
context: MediaContext,
opus_header: TryHashMap<u32, TryVec<u8>>,
pssh_data: TryVec<u8>,
sample_table: TryHashMap<u32, TryVec<Mp4parseIndice>>,
sample_table: TryHashMap<u32, TryVec<Indice>>,
// Store a mapping from track index (not id) to associated sample
// descriptions. Because each track has a variable number of sample
// descriptions, and because we need the data to live long enough to be
@ -454,7 +331,7 @@ where
fn with_context(context: Self::Context) -> Self;
fn read<T: Read>(io: &mut T) -> mp4parse::Result<Self::Context>;
fn read<T: Read>(io: &mut T, strictness: ParseStrictness) -> mp4parse::Result<Self::Context>;
}
impl Mp4parseParser {
@ -477,7 +354,7 @@ impl ContextParser for Mp4parseParser {
}
}
fn read<T: Read>(io: &mut T) -> mp4parse::Result<Self::Context> {
fn read<T: Read>(io: &mut T, _strictness: ParseStrictness) -> mp4parse::Result<Self::Context> {
let r = mp4parse::read_mp4(io);
log::debug!("mp4parse::read_mp4 -> {:?}", r);
r
@ -501,9 +378,12 @@ impl ContextParser for Mp4parseAvifParser {
Self { context }
}
fn read<T: Read>(io: &mut T) -> mp4parse::Result<Self::Context> {
let r = mp4parse::read_avif(io);
log::debug!("mp4parse::read_avif -> {:?}", r);
fn read<T: Read>(io: &mut T, strictness: ParseStrictness) -> mp4parse::Result<Self::Context> {
let r = mp4parse::read_avif(io, strictness);
if r.is_err() {
log::debug!("{:?}", r);
}
log::trace!("mp4parse::read_avif -> {:?}", r);
r
}
}
@ -561,7 +441,7 @@ pub unsafe extern "C" fn mp4parse_new(
io: *const Mp4parseIo,
parser_out: *mut *mut Mp4parseParser,
) -> Mp4parseStatus {
mp4parse_new_common(io, parser_out)
mp4parse_new_common(io, ParseStrictness::Normal, parser_out)
}
/// Allocate an `Mp4parseAvifParser*` to read from the supplied `Mp4parseIo`.
@ -576,13 +456,15 @@ pub unsafe extern "C" fn mp4parse_new(
#[no_mangle]
pub unsafe extern "C" fn mp4parse_avif_new(
io: *const Mp4parseIo,
strictness: ParseStrictness,
parser_out: *mut *mut Mp4parseAvifParser,
) -> Mp4parseStatus {
mp4parse_new_common(io, parser_out)
mp4parse_new_common(io, strictness, parser_out)
}
unsafe fn mp4parse_new_common<P: ContextParser>(
io: *const Mp4parseIo,
strictness: ParseStrictness,
parser_out: *mut *mut P,
) -> Mp4parseStatus {
// Validate arguments from C.
@ -594,7 +476,7 @@ unsafe fn mp4parse_new_common<P: ContextParser>(
{
Mp4parseStatus::BadArg
} else {
match mp4parse_new_common_safe(&mut (*io).clone()) {
match mp4parse_new_common_safe(&mut (*io).clone(), strictness) {
Ok(parser) => {
*parser_out = parser;
Mp4parseStatus::Ok
@ -606,8 +488,9 @@ unsafe fn mp4parse_new_common<P: ContextParser>(
fn mp4parse_new_common_safe<T: Read, P: ContextParser>(
io: &mut T,
strictness: ParseStrictness,
) -> Result<*mut P, Mp4parseStatus> {
P::read(io)
P::read(io, strictness)
.map(P::with_context)
.and_then(|x| TryBox::try_new(x).map_err(mp4parse::Error::from))
.map(TryBox::into_raw)
@ -700,47 +583,6 @@ pub unsafe extern "C" fn mp4parse_get_track_count(
Mp4parseStatus::Ok
}
/// Calculate numerator * scale / denominator, if possible.
///
/// Applying the associativity of integer arithmetic, we divide first
/// and add the remainder after multiplying each term separately
/// to preserve precision while leaving more headroom. That is,
/// (n * s) / d is split into floor(n / d) * s + (n % d) * s / d.
///
/// Return None on overflow or if the denominator is zero.
fn rational_scale<T, S>(numerator: T, denominator: T, scale2: S) -> Option<T>
where
    T: PrimInt + Zero,
    S: PrimInt,
{
    if denominator.is_zero() {
        return None;
    }
    // Split into whole part and remainder so each product stays small.
    let whole = numerator / denominator;
    let fraction = numerator % denominator;
    // The scale must be representable in T for the multiplications below.
    let scale: T = num_traits::cast(scale2)?;
    let scaled_whole = whole.checked_mul(&scale)?;
    let scaled_fraction = fraction.checked_mul(&scale)?;
    (scaled_fraction / denominator).checked_add(&scaled_whole)
}
// Convert a media-timescale duration to microseconds; None on arithmetic
// overflow or a zero timescale (see rational_scale).
fn media_time_to_us(time: MediaScaledTime, scale: MediaTimeScale) -> Option<u64> {
    let microseconds_per_second = 1_000_000;
    rational_scale::<u64, u64>(time.0, scale.0, microseconds_per_second)
}
// Convert a track-timescale time to microseconds; None on overflow or a zero
// timescale. Panics if `time` and `scale` carry different track ids (field .1).
fn track_time_to_us<T>(time: TrackScaledTime<T>, scale: TrackTimeScale<T>) -> Option<T>
where
    T: PrimInt + Zero,
{
    assert_eq!(time.1, scale.1);
    let microseconds_per_second = 1_000_000;
    rational_scale::<T, u64>(time.0, scale.0, microseconds_per_second)
}
/// Fill the supplied `Mp4parseTrackInfo` with metadata for `track`.
///
/// # Safety
@ -780,19 +622,22 @@ pub unsafe extern "C" fn mp4parse_get_track_info(
let track = &context.tracks[track_index];
if let (Some(track_timescale), Some(context_timescale)) = (track.timescale, context.timescale) {
let media_time: CheckedInteger<_> = match track.media_time.map_or(Some(0), |media_time| {
track_time_to_us(media_time, track_timescale)
}) {
Some(time) => time.into(),
let media_time: CheckedInteger<_> = match track
.media_time
.map_or(Some(Microseconds(0)), |media_time| {
track_time_to_us(media_time, track_timescale)
}) {
Some(time) => time.0.into(),
None => return Mp4parseStatus::Invalid,
};
let empty_duration: CheckedInteger<_> =
match track.empty_duration.map_or(Some(0), |empty_duration| {
let empty_duration: CheckedInteger<_> = match track
.empty_duration
.map_or(Some(Microseconds(0)), |empty_duration| {
media_time_to_us(empty_duration, context_timescale)
}) {
Some(time) => time.into(),
None => return Mp4parseStatus::Invalid,
};
Some(time) => time.0.into(),
None => return Mp4parseStatus::Invalid,
};
info.media_time = match media_time - empty_duration {
Some(difference) => difference,
None => return Mp4parseStatus::Invalid,
@ -800,7 +645,7 @@ pub unsafe extern "C" fn mp4parse_get_track_info(
if let Some(track_duration) = track.duration {
match track_time_to_us(track_duration, track_timescale) {
Some(duration) => info.duration = duration,
Some(duration) => info.duration = duration.0,
None => return Mp4parseStatus::Invalid,
}
} else {
@ -897,6 +742,14 @@ fn get_track_audio_info(
}
AudioCodecSpecific::MP3 => Mp4parseCodec::Mp3,
AudioCodecSpecific::ALACSpecificBox(_) => Mp4parseCodec::Alac,
#[cfg(feature = "3gpp")]
AudioCodecSpecific::AMRSpecificBox(_) => {
if audio.codec_type == CodecType::AMRNB {
Mp4parseCodec::AMRNB
} else {
Mp4parseCodec::AMRWB
}
}
};
sample_info.channels = audio.channelcount as u16;
sample_info.bit_depth = audio.samplesize;
@ -908,9 +761,9 @@ fn get_track_audio_info(
if esds.codec_esds.len() > std::u32::MAX as usize {
return Err(Mp4parseStatus::Invalid);
}
sample_info.extra_data.length = esds.codec_esds.len() as u32;
sample_info.extra_data.length = esds.codec_esds.len();
sample_info.extra_data.data = esds.codec_esds.as_ptr();
sample_info.codec_specific_config.length = esds.decoder_specific_data.len() as u32;
sample_info.codec_specific_config.length = esds.decoder_specific_data.len();
sample_info.codec_specific_config.data = esds.decoder_specific_data.as_ptr();
if let Some(rate) = esds.audio_sample_rate {
sample_info.sample_rate = rate;
@ -932,7 +785,7 @@ fn get_track_audio_info(
if streaminfo.block_type != 0 || streaminfo.data.len() != 34 {
return Err(Mp4parseStatus::Invalid);
}
sample_info.codec_specific_config.length = streaminfo.data.len() as u32;
sample_info.codec_specific_config.length = streaminfo.data.len();
sample_info.codec_specific_config.data = streaminfo.data.as_ptr();
}
AudioCodecSpecific::OpusSpecificBox(ref opus) => {
@ -947,17 +800,19 @@ fn get_track_audio_info(
if v.len() > std::u32::MAX as usize {
return Err(Mp4parseStatus::Invalid);
}
sample_info.codec_specific_config.length = v.len() as u32;
sample_info.codec_specific_config.length = v.len();
sample_info.codec_specific_config.data = v.as_ptr();
}
}
}
}
AudioCodecSpecific::ALACSpecificBox(ref alac) => {
sample_info.codec_specific_config.length = alac.data.len() as u32;
sample_info.codec_specific_config.length = alac.data.len();
sample_info.codec_specific_config.data = alac.data.as_ptr();
}
AudioCodecSpecific::MP3 | AudioCodecSpecific::LPCM => (),
#[cfg(feature = "3gpp")]
AudioCodecSpecific::AMRSpecificBox(_) => (),
}
if let Some(p) = audio
@ -966,7 +821,7 @@ fn get_track_audio_info(
.find(|sinf| sinf.tenc.is_some())
{
sample_info.protected_data.original_format =
OptionalFourCC::Some(p.original_format.value);
OptionalFourCc::Some(p.original_format.value);
sample_info.protected_data.scheme_type = match p.scheme_type {
Some(ref scheme_type_box) => {
match scheme_type_box.scheme_type.value.as_ref() {
@ -984,14 +839,10 @@ fn get_track_audio_info(
sample_info.protected_data.is_encrypted = tenc.is_encrypted;
sample_info.protected_data.iv_size = tenc.iv_size;
sample_info.protected_data.kid.set_data(&(tenc.kid));
sample_info.protected_data.crypt_byte_block = match tenc.crypt_byte_block_count {
Some(n) => n,
None => 0,
};
sample_info.protected_data.skip_byte_block = match tenc.skip_byte_block_count {
Some(n) => n,
None => 0,
};
sample_info.protected_data.crypt_byte_block =
tenc.crypt_byte_block_count.unwrap_or(0);
sample_info.protected_data.skip_byte_block =
tenc.skip_byte_block_count.unwrap_or(0);
if let Some(ref iv_vec) = tenc.constant_iv {
if iv_vec.len() > std::u32::MAX as usize {
return Err(Mp4parseStatus::Invalid);
@ -1132,7 +983,7 @@ fn mp4parse_get_track_video_info_safe(
.find(|sinf| sinf.tenc.is_some())
{
sample_info.protected_data.original_format =
OptionalFourCC::Some(p.original_format.value);
OptionalFourCc::Some(p.original_format.value);
sample_info.protected_data.scheme_type = match p.scheme_type {
Some(ref scheme_type_box) => {
match scheme_type_box.scheme_type.value.as_ref() {
@ -1150,14 +1001,10 @@ fn mp4parse_get_track_video_info_safe(
sample_info.protected_data.is_encrypted = tenc.is_encrypted;
sample_info.protected_data.iv_size = tenc.iv_size;
sample_info.protected_data.kid.set_data(&(tenc.kid));
sample_info.protected_data.crypt_byte_block = match tenc.crypt_byte_block_count {
Some(n) => n,
None => 0,
};
sample_info.protected_data.skip_byte_block = match tenc.skip_byte_block_count {
Some(n) => n,
None => 0,
};
sample_info.protected_data.crypt_byte_block =
tenc.crypt_byte_block_count.unwrap_or(0);
sample_info.protected_data.skip_byte_block =
tenc.skip_byte_block_count.unwrap_or(0);
if let Some(ref iv_vec) = tenc.constant_iv {
if iv_vec.len() > std::u32::MAX as usize {
return Err(Mp4parseStatus::Invalid);
@ -1268,7 +1115,7 @@ fn get_indice_table(
let media_time = match (&track.media_time, &track.timescale) {
(&Some(t), &Some(s)) => track_time_to_us(t, s)
.and_then(|v| i64::try_from(v).ok())
.and_then(|v| i64::try_from(v.0).ok())
.map(Into::into),
_ => None,
};
@ -1276,7 +1123,7 @@ fn get_indice_table(
let empty_duration: Option<CheckedInteger<_>> =
match (&track.empty_duration, &context.timescale) {
(&Some(e), &Some(s)) => media_time_to_us(e, s)
.and_then(|v| i64::try_from(v).ok())
.and_then(|v| i64::try_from(v.0).ok())
.map(Into::into),
_ => None,
};
@ -1300,340 +1147,6 @@ fn get_indice_table(
Err(Mp4parseStatus::Invalid)
}
// Convert a 'ctts' compact table to full table by iterator,
// (sample_with_the_same_offset_count, offset) => (offset), (offset), (offset) ...
//
// For example:
// (2, 10), (4, 9) into (10, 10, 9, 9, 9, 9) by calling next_offset_time().
struct TimeOffsetIterator<'a> {
    // Samples left to emit for the current compact 'ctts' entry.
    cur_sample_range: std::ops::Range<u32>,
    // Offset shared by all samples of the current entry.
    cur_offset: i64,
    // Remaining compact entries; None when the track has no 'ctts' box.
    ctts_iter: Option<std::slice::Iter<'a, mp4parse::TimeOffset>>,
    track_id: usize,
}
impl<'a> Iterator for TimeOffsetIterator<'a> {
    type Item = i64;
    #[allow(clippy::reversed_empty_ranges)]
    fn next(&mut self) -> Option<i64> {
        // Emit the current entry's offset once per sample; when the entry is
        // exhausted, advance to the next compact (count, offset) pair.
        let has_sample = self.cur_sample_range.next().or_else(|| {
            // At end of current TimeOffset, find the next TimeOffset.
            let iter = match self.ctts_iter {
                Some(ref mut v) => v,
                _ => return None,
            };
            let offset_version;
            self.cur_sample_range = match iter.next() {
                Some(v) => {
                    offset_version = v.time_offset;
                    0..v.sample_count
                }
                _ => {
                    // No entries left: leave an empty range so iteration ends.
                    offset_version = mp4parse::TimeOffsetVersion::Version0(0);
                    0..0
                }
            };
            // Both ctts versions widen losslessly to i64.
            self.cur_offset = match offset_version {
                mp4parse::TimeOffsetVersion::Version0(i) => i64::from(i),
                mp4parse::TimeOffsetVersion::Version1(i) => i64::from(i),
            };
            self.cur_sample_range.next()
        });
        has_sample.and(Some(self.cur_offset))
    }
}
impl<'a> TimeOffsetIterator<'a> {
    // Like next(), but never runs dry: once 'ctts' is exhausted, further
    // samples get a zero offset in the track's timescale.
    fn next_offset_time(&mut self) -> TrackScaledTime<i64> {
        let offset = self.next().unwrap_or(0);
        TrackScaledTime::<i64>(offset, self.track_id)
    }
}
// Convert 'stts' compact table to full table by iterator,
// (sample_count_with_the_same_time, time) => (time, time, time) ... repeats
// sample_count_with_the_same_time.
//
// For example:
// (2, 3000), (1, 2999) to (3000, 3000, 2999).
struct TimeToSampleIterator<'a> {
    // Samples left to emit for the current compact 'stts' entry.
    cur_sample_count: std::ops::Range<u32>,
    // Delta shared by all samples of the current entry.
    cur_sample_delta: u32,
    stts_iter: std::slice::Iter<'a, mp4parse::Sample>,
    track_id: usize,
}
impl<'a> Iterator for TimeToSampleIterator<'a> {
    type Item = u32;
    #[allow(clippy::reversed_empty_ranges)]
    fn next(&mut self) -> Option<u32> {
        // Emit the current entry's delta once per sample, advancing to the
        // next compact (count, delta) pair when the entry is exhausted.
        let has_sample = self.cur_sample_count.next().or_else(|| {
            self.cur_sample_count = match self.stts_iter.next() {
                Some(v) => {
                    self.cur_sample_delta = v.sample_delta;
                    0..v.sample_count
                }
                _ => 0..0,
            };
            self.cur_sample_count.next()
        });
        has_sample.and(Some(self.cur_sample_delta))
    }
}
impl<'a> TimeToSampleIterator<'a> {
    // Like next(), but never runs dry: once 'stts' is exhausted, further
    // samples get a zero delta in the track's timescale.
    fn next_delta(&mut self) -> TrackScaledTime<i64> {
        let delta = self.next().map_or(0, i64::from);
        TrackScaledTime::<i64>(delta, self.track_id)
    }
}
// Convert 'stco' compact table to full table by iterator.
// (start_chunk_num, sample_number) => (start_chunk_num, sample_number),
// (start_chunk_num + 1, sample_number),
// (start_chunk_num + 2, sample_number),
// ...
// (next start_chunk_num, next sample_number),
// ...
//
// For example:
// (1, 5), (5, 10), (9, 2) => (1, 5), (2, 5), (3, 5), (4, 5), (5, 10), (6, 10),
// (7, 10), (8, 10), (9, 2)
// Build an iterator that expands the compact 'stsc' runs into per-chunk
// (chunk_id, samples_per_chunk) pairs, bounded by the 'stco' chunk count.
fn sample_to_chunk_iter<'a>(
    stsc_samples: &'a TryVec<mp4parse::SampleToChunk>,
    stco_offsets: &'a TryVec<u64>,
) -> SampleToChunkIterator<'a> {
    SampleToChunkIterator {
        chunks: (0..0),
        sample_count: 0,
        stsc_peek_iter: stsc_samples.as_slice().iter().peekable(),
        remain_chunk_count: stco_offsets
            .len()
            .try_into()
            .expect("stco.entry_count is u32"),
    }
}
struct SampleToChunkIterator<'a> {
    // Chunk ids covered by the current 'stsc' run.
    chunks: std::ops::Range<u32>,
    // Samples per chunk for the current run.
    sample_count: u32,
    stsc_peek_iter: std::iter::Peekable<std::slice::Iter<'a, mp4parse::SampleToChunk>>,
    remain_chunk_count: u32, // total chunk number from 'stco'.
}
impl<'a> Iterator for SampleToChunkIterator<'a> {
    type Item = (u32, u32);
    fn next(&mut self) -> Option<(u32, u32)> {
        // When the current run is exhausted, locate() finds the next valid
        // 'stsc' run; the remaining 'stco' chunk budget caps the expansion
        // and terminates iteration when it would go negative.
        let has_chunk = self.chunks.next().or_else(|| {
            self.chunks = self.locate();
            self.remain_chunk_count
                .checked_sub(
                    self.chunks
                        .len()
                        .try_into()
                        .expect("len() of a Range<u32> must fit in u32"),
                )
                .and_then(|res| {
                    self.remain_chunk_count = res;
                    self.chunks.next()
                })
        });
        has_chunk.map(|id| (id, self.sample_count))
    }
}
impl<'a> SampleToChunkIterator<'a> {
    // Find the next run of chunk ids (as a 0-based half-open range) described
    // by the 'stsc' table, updating sample_count for that run.
    #[allow(clippy::reversed_empty_ranges)]
    fn locate(&mut self) -> std::ops::Range<u32> {
        loop {
            return match (self.stsc_peek_iter.next(), self.stsc_peek_iter.peek()) {
                (Some(next), Some(peek)) if next.first_chunk == peek.first_chunk => {
                    // Invalid entry, skip it and will continue searching at
                    // next loop iteration.
                    continue;
                }
                (Some(next), Some(peek)) if next.first_chunk > 0 && peek.first_chunk > 0 => {
                    // first_chunk is 1-based in the file format; convert to a
                    // 0-based half-open range.
                    self.sample_count = next.samples_per_chunk;
                    (next.first_chunk - 1)..(peek.first_chunk - 1)
                }
                (Some(next), None) if next.first_chunk > 0 => {
                    self.sample_count = next.samples_per_chunk;
                    // Total chunk number in 'stsc' could be different to 'stco',
                    // there could be more chunks at the last 'stsc' record.
                    match next.first_chunk.checked_add(self.remain_chunk_count) {
                        Some(r) => (next.first_chunk - 1)..r - 1,
                        _ => 0..0,
                    }
                }
                _ => 0..0,
            };
        }
    }
}
#[allow(clippy::reversed_empty_ranges)]
fn create_sample_table(
track: &Track,
track_offset_time: CheckedInteger<i64>,
) -> Option<TryVec<Mp4parseIndice>> {
let timescale = match track.timescale {
Some(ref t) => TrackTimeScale::<i64>(t.0 as i64, t.1),
_ => return None,
};
let (stsc, stco, stsz, stts) = match (&track.stsc, &track.stco, &track.stsz, &track.stts) {
(&Some(ref a), &Some(ref b), &Some(ref c), &Some(ref d)) => (a, b, c, d),
_ => return None,
};
// According to spec, no sync table means every sample is sync sample.
let has_sync_table = match track.stss {
Some(_) => true,
_ => false,
};
let mut sample_size_iter = stsz.sample_sizes.iter();
// Get 'stsc' iterator for (chunk_id, chunk_sample_count) and calculate the sample
// offset address.
// With large numbers of samples, the cost of many allocations dominates,
// so it's worth iterating twice to allocate sample_table just once.
let total_sample_count = sample_to_chunk_iter(&stsc.samples, &stco.offsets)
.by_ref()
.map(|(_, sample_counts)| sample_counts.to_usize())
.sum();
let mut sample_table = TryVec::with_capacity(total_sample_count).ok()?;
for i in sample_to_chunk_iter(&stsc.samples, &stco.offsets) {
let chunk_id = i.0 as usize;
let sample_counts = i.1;
let mut cur_position = match stco.offsets.get(chunk_id) {
Some(&i) => i.into(),
_ => return None,
};
for _ in 0..sample_counts {
let start_offset = cur_position;
let end_offset = match (stsz.sample_size, sample_size_iter.next()) {
(_, Some(t)) => (start_offset + *t)?,
(t, _) if t > 0 => (start_offset + t)?,
_ => 0.into(),
};
if end_offset == 0 {
return None;
}
cur_position = end_offset;
sample_table
.push(Mp4parseIndice {
start_offset,
end_offset,
sync: !has_sync_table,
..Default::default()
})
.ok()?;
}
}
// Mark the sync sample in sample_table according to 'stss'.
if let Some(ref v) = track.stss {
for iter in &v.samples {
match iter
.checked_sub(&1)
.and_then(|idx| sample_table.get_mut(idx as usize))
{
Some(elem) => elem.sync = true,
_ => return None,
}
}
}
let ctts_iter = match track.ctts {
Some(ref v) => Some(v.samples.as_slice().iter()),
_ => None,
};
let mut ctts_offset_iter = TimeOffsetIterator {
cur_sample_range: (0..0),
cur_offset: 0,
ctts_iter,
track_id: track.id,
};
let mut stts_iter = TimeToSampleIterator {
cur_sample_count: (0..0),
cur_sample_delta: 0,
stts_iter: stts.samples.as_slice().iter(),
track_id: track.id,
};
// sum_delta is the sum of stts_iter delta.
// According to sepc:
// decode time => DT(n) = DT(n-1) + STTS(n)
// composition time => CT(n) = DT(n) + CTTS(n)
// Note:
// composition time needs to add the track offset time from 'elst' table.
let mut sum_delta = TrackScaledTime::<i64>(0, track.id);
for sample in sample_table.as_mut_slice() {
let decode_time = sum_delta;
sum_delta = (sum_delta + stts_iter.next_delta())?;
// ctts_offset is the current sample offset time.
let ctts_offset = ctts_offset_iter.next_offset_time();
let start_composition = track_time_to_us((decode_time + ctts_offset)?, timescale)?;
let end_composition = track_time_to_us((sum_delta + ctts_offset)?, timescale)?;
let start_decode = track_time_to_us(decode_time, timescale)?;
sample.start_composition = (track_offset_time + start_composition)?;
sample.end_composition = (track_offset_time + end_composition)?;
sample.start_decode = start_decode.into();
}
// Correct composition end time due to 'ctts' causes composition time re-ordering.
//
// Composition end time is not in specification. However, gecko needs it, so we need to
// calculate to correct the composition end time.
if !sample_table.is_empty() {
        // Create an index table that refers to sample_table and is sorted by start_composition time.
let mut sort_table = TryVec::with_capacity(sample_table.len()).ok()?;
for i in 0..sample_table.len() {
sort_table.push(i).ok()?;
}
sort_table.sort_by_key(|i| match sample_table.get(*i) {
Some(v) => v.start_composition,
_ => 0.into(),
});
for indices in sort_table.windows(2) {
if let [current_index, peek_index] = *indices {
let next_start_composition_time = sample_table[peek_index].start_composition;
let sample = &mut sample_table[current_index];
sample.end_composition = next_start_composition_time;
}
}
}
Some(sample_table)
}
/// Fill the supplied `Mp4parseFragmentInfo` with metadata from fragmented file.
///
/// # Safety
@ -1667,7 +1180,7 @@ pub unsafe extern "C" fn mp4parse_get_fragment_info(
if let (Some(time), Some(scale)) = (duration, context.timescale) {
info.fragment_duration = match media_time_to_us(time, scale) {
Some(time_us) => time_us as u64,
Some(time_us) => time_us.0 as u64,
None => return Mp4parseStatus::Invalid,
}
}
@ -2084,35 +1597,3 @@ fn minimal_mp4_get_track_info_invalid_track_number() {
mp4parse_free(parser);
}
}
#[test]
fn rational_scale_overflow() {
    // Small values scale exactly: 17 * 1000 / 3 == 5666 (truncating division).
    assert_eq!(rational_scale::<u64, u64>(17, 3, 1000), Some(5666));

    // 2^62 — large enough that a naive multiply would overflow u64.
    let big: u64 = 0x4000_0000_0000_0000;
    // (value, denominator, numerator, expected result)
    let cases = [
        (big, 2, 2, Some(big)),         // identity ratio near the limit
        (big, 4, 4, Some(big)),         // identity ratio near the limit
        (big, 2, 8, None),              // scaling up by 4 overflows
        (big, 8, 4, Some(big / 2)),     // scaling down stays in range
        (big + 1, 4, 4, Some(big + 1)), // identity holds off the power of two
        (big, 40, 1000, None),          // scaling up by 25 overflows
    ];
    for &(value, denominator, numerator, expected) in cases.iter() {
        assert_eq!(
            rational_scale::<u64, u64>(value, denominator, numerator),
            expected
        );
    }
}
#[test]
fn media_time_overflow() {
    // A media duration larger than 2^53 ticks at a 90 kHz timescale must
    // still convert to microseconds without overflowing intermediate math.
    let scale = MediaTimeScale(90000);
    let duration = MediaScaledTime(9_007_199_254_710_000);
    let converted = media_time_to_us(duration, scale);
    assert_eq!(converted, Some(100_079_991_719_000_000));
}
#[test]
fn track_time_overflow() {
    // A 44.1 kHz track time well beyond 2^52 ticks must convert to
    // microseconds without overflowing intermediate math.
    let scale = TrackTimeScale(44100u64, 0);
    let duration = TrackScaledTime(4_413_527_634_807_900u64, 0);
    let converted = track_time_to_us(duration, scale);
    assert_eq!(converted, Some(100_079_991_719_000_000));
}

View File

@ -52,7 +52,7 @@ fn parse_cenc() {
let protected_data = &(*video.sample_info).protected_data;
assert_eq!(
protected_data.original_format,
OptionalFourCC::Some(*b"avc1")
OptionalFourCc::Some(*b"avc1")
);
assert_eq!(
protected_data.scheme_type,
@ -84,7 +84,7 @@ fn parse_cenc() {
let protected_data = &(*audio.sample_info).protected_data;
assert_eq!(
protected_data.original_format,
OptionalFourCC::Some(*b"mp4a")
OptionalFourCc::Some(*b"mp4a")
);
assert_eq!(protected_data.is_encrypted, 0x01);
assert_eq!(protected_data.iv_size, 16);
@ -137,7 +137,7 @@ fn parse_cbcs() {
let protected_data = &(*video.sample_info).protected_data;
assert_eq!(
protected_data.original_format,
OptionalFourCC::Some(*b"avc1")
OptionalFourCc::Some(*b"avc1")
);
assert_eq!(
protected_data.scheme_type,
@ -197,7 +197,7 @@ fn parse_unencrypted() {
assert_eq!(rv, Mp4parseStatus::Ok);
assert_eq!(audio.sample_info_count, 1);
let protected_data = &(*audio.sample_info).protected_data;
assert_eq!(protected_data.original_format, OptionalFourCC::None);
assert_eq!(protected_data.original_format, OptionalFourCc::None);
assert_eq!(
protected_data.scheme_type,
Mp4ParseEncryptionSchemeType::None
@ -266,7 +266,7 @@ fn parse_encrypted_av1() {
let protected_data = &(*video.sample_info).protected_data;
assert_eq!(
protected_data.original_format,
OptionalFourCC::Some(*b"av01")
OptionalFourCc::Some(*b"av01")
);
assert_eq!(
protected_data.scheme_type,

View File

@ -1,4 +1,6 @@
extern crate mp4parse;
extern crate mp4parse_capi;
use mp4parse::unstable::Indice;
use mp4parse_capi::*;
use std::io::Read;
@ -47,7 +49,7 @@ fn parse_sample_table() {
assert_eq!(rv, Mp4parseStatus::Ok);
// Compare the value from stagefright.
let audio_indice_0 = Mp4parseIndice {
let audio_indice_0 = Indice {
start_offset: 27_046.into(),
end_offset: 27_052.into(),
start_composition: 0.into(),
@ -55,7 +57,7 @@ fn parse_sample_table() {
start_decode: 0.into(),
sync: true,
};
let audio_indice_215 = Mp4parseIndice {
let audio_indice_215 = Indice {
start_offset: 283_550.into(),
end_offset: 283_556.into(),
start_composition: 9_984_580.into(),
@ -82,7 +84,7 @@ fn parse_sample_table() {
assert_eq!(rv, Mp4parseStatus::Ok);
// Compare the last few data from stagefright.
let video_indice_291 = Mp4parseIndice {
let video_indice_291 = Indice {
start_offset: 280_226.into(),
end_offset: 280_855.into(),
start_composition: 9_838_333.into(),
@ -90,7 +92,7 @@ fn parse_sample_table() {
start_decode: 9_710_000.into(),
sync: false,
};
let video_indice_292 = Mp4parseIndice {
let video_indice_292 = Indice {
start_offset: 280_855.into(),
end_offset: 281_297.into(),
start_composition: 9_805_011.into(),
@ -100,9 +102,9 @@ fn parse_sample_table() {
};
// TODO: start_composition time in stagefright is 9905000, but it is 9904999 in parser, it
// could be rounding error.
//let video_indice_293 = Mp4parseIndice { start_offset: 281_297, end_offset: 281_919, start_composition: 9_905_000, end_composition: 9_938_344, start_decode: 9_776_666, sync: false };
//let video_indice_294 = Mp4parseIndice { start_offset: 281_919, end_offset: 282_391, start_composition: 9_871_677, end_composition: 9_905_000, start_decode: 9_776_677, sync: false };
let video_indice_295 = Mp4parseIndice {
//let video_indice_293 = Indice { start_offset: 281_297, end_offset: 281_919, start_composition: 9_905_000, end_composition: 9_938_344, start_decode: 9_776_666, sync: false };
//let video_indice_294 = Indice { start_offset: 281_919, end_offset: 282_391, start_composition: 9_871_677, end_composition: 9_905_000, start_decode: 9_776_677, sync: false };
let video_indice_295 = Indice {
start_offset: 282_391.into(),
end_offset: 283_032.into(),
start_composition: 9_971_666.into(),
@ -110,7 +112,7 @@ fn parse_sample_table() {
start_decode: 9_843_333.into(),
sync: false,
};
let video_indice_296 = Mp4parseIndice {
let video_indice_296 = Indice {
start_offset: 283_092.into(),
end_offset: 283_526.into(),
start_composition: 9_938_344.into(),
@ -168,7 +170,7 @@ fn parse_sample_table_with_elst() {
// Compare the value from stagefright.
// Due to 'elst', the start_composition and end_composition are negative
// at first two samples.
let audio_indice_0 = Mp4parseIndice {
let audio_indice_0 = Indice {
start_offset: 6992.into(),
end_offset: 7363.into(),
start_composition: (-36281).into(),
@ -176,7 +178,7 @@ fn parse_sample_table_with_elst() {
start_decode: 0.into(),
sync: true,
};
let audio_indice_1 = Mp4parseIndice {
let audio_indice_1 = Indice {
start_offset: 7363.into(),
end_offset: 7735.into(),
start_composition: (-13062).into(),
@ -184,7 +186,7 @@ fn parse_sample_table_with_elst() {
start_decode: 23219.into(),
sync: true,
};
let audio_indice_2 = Mp4parseIndice {
let audio_indice_2 = Indice {
start_offset: 7735.into(),
end_offset: 8106.into(),
start_composition: 10158.into(),
@ -235,7 +237,7 @@ fn parse_sample_table_with_negative_ctts() {
assert_eq!(rv, Mp4parseStatus::Ok);
// There are negative value in 'ctts' table.
let video_indice_0 = Mp4parseIndice {
let video_indice_0 = Indice {
start_offset: 48.into(),
end_offset: 890.into(),
start_composition: 0.into(),
@ -243,7 +245,7 @@ fn parse_sample_table_with_negative_ctts() {
start_decode: 0.into(),
sync: true,
};
let video_indice_1 = Mp4parseIndice {
let video_indice_1 = Indice {
start_offset: 890.into(),
end_offset: 913.into(),
start_composition: 133_333.into(),
@ -251,7 +253,7 @@ fn parse_sample_table_with_negative_ctts() {
start_decode: 33_333.into(),
sync: false,
};
let video_indice_2 = Mp4parseIndice {
let video_indice_2 = Indice {
start_offset: 913.into(),
end_offset: 934.into(),
start_composition: 66_666.into(),
@ -259,7 +261,7 @@ fn parse_sample_table_with_negative_ctts() {
start_decode: 66_666.into(),
sync: false,
};
let video_indice_3 = Mp4parseIndice {
let video_indice_3 = Indice {
start_offset: 934.into(),
end_offset: 955.into(),
start_composition: 33_333.into(),

View File

@ -9,7 +9,7 @@ description = "Shared Rust code for libxul"
geckoservo = { path = "../../../../servo/ports/geckolib" }
kvstore = { path = "../../../components/kvstore" }
lmdb-rkv-sys = { version = "0.11", features = ["mdb_idl_logn_9"] }
mp4parse_capi = { git = "https://github.com/mozilla/mp4parse-rust", rev = "94c3b1f368c82aefcbf51967f6aa296a9ccceb69" }
mp4parse_capi = { git = "https://github.com/mozilla/mp4parse-rust", rev = "dd967732ac836cc1ed5f1a7259d912c5d13e009b" }
nserror = { path = "../../../../xpcom/rust/nserror" }
nsstring = { path = "../../../../xpcom/rust/nsstring" }
netwerk_helper = { path = "../../../../netwerk/base/rust-helper" }