Merge mozilla-central to mozilla-inbound. CLOSED TREE

Csoregi Natalia 2019-02-24 11:24:54 +02:00
commit 035247d4b6
59 changed files with 872 additions and 513 deletions

View File

@ -335,6 +335,13 @@ textarea.browser-style:focus:hover {
box-shadow: 0 0 0 2px rgba(97, 181, 255, 0.75);
}
.browser-style > input[type="text"]:invalid:not(:focus),
textarea.browser-style:invalid:not(:focus) {
border-color: var(--red-60);
box-shadow: 0 0 0 1px var(--red-60),
0 0 0 4px rgba(251, 0, 34, 0.3);
}
/* stylelint-disable property-no-vendor-prefix */
.panel-section {
display: flex;

View File

@ -258,6 +258,26 @@ const AnimationProperty* KeyframeEffect::GetEffectiveAnimationOfProperty(
return nullptr;
}
bool KeyframeEffect::HasEffectiveAnimationOfPropertySet(
const nsCSSPropertyIDSet& aPropertySet, const EffectSet& aEffect) const {
bool ret = false;
for (const AnimationProperty& property : mProperties) {
if (!aPropertySet.HasProperty(property.mProperty)) {
continue;
}
// Only consider the property if it is not overridden by !important rules at
// the transitions level. If any of the properties is overridden by
// !important rules, we return false. This matters especially for
// transform-like properties, because all of them should run on the same
// thread.
if (!IsEffectiveProperty(aEffect, property.mProperty)) {
return false;
}
ret = true;
}
return ret;
}
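Illustrative sketch only (Rust with toy string sets, not the Gecko types above) of the rule the new HasEffectiveAnimationOfPropertySet implements: the set counts as effectively animated only if at least one property in it is animated and none of them is overridden at the transitions level.
fn has_effective_animation_of_set(
    animated: &[&str],
    overridden_by_important: &[&str],
    property_set: &[&str],
) -> bool {
    let mut found = false;
    for prop in property_set {
        if !animated.contains(prop) {
            continue;
        }
        // One overridden property rejects the whole set, because all
        // transform-like properties must run on the same thread.
        if overridden_by_important.contains(prop) {
            return false;
        }
        found = true;
    }
    found
}
fn main() {
    // `scale` is overridden by an !important transition, so the whole
    // transform-like set is rejected.
    let result = has_effective_animation_of_set(
        &["transform", "scale"],
        &["scale"],
        &["transform", "scale", "rotate", "translate"],
    );
    assert!(!result);
}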
nsCSSPropertyIDSet KeyframeEffect::GetPropertiesForCompositor(
EffectSet& aEffects, const nsIFrame* aFrame) const {
MOZ_ASSERT(&aEffects ==
@ -1659,7 +1679,9 @@ bool KeyframeEffect::ContainsAnimatedScale(const nsIFrame* aFrame) const {
}
for (const AnimationProperty& prop : mProperties) {
if (prop.mProperty != eCSSProperty_transform) {
if (prop.mProperty != eCSSProperty_transform &&
prop.mProperty != eCSSProperty_scale &&
prop.mProperty != eCSSProperty_rotate) {
continue;
}

View File

@ -201,6 +201,13 @@ class KeyframeEffect : public AnimationEffect {
const AnimationProperty* GetEffectiveAnimationOfProperty(
nsCSSPropertyID aProperty, const EffectSet& aEffect) const;
// This is a variant of the above function that takes an nsCSSPropertyIDSet.
// It returns true if this keyframe effect has properties in |aPropertySet|
// and none of those properties is overridden by an !important rule at the
// transitions level.
bool HasEffectiveAnimationOfPropertySet(
const nsCSSPropertyIDSet& aPropertySet, const EffectSet& aEffect) const;
// Returns all the effective animated CSS properties that can be animated on
// the compositor and are not overridden by a higher cascade level.
//

View File

@ -3455,7 +3455,8 @@ nsDOMWindowUtils::GetOMTCTransform(Element* aElement,
}
DisplayItemType itemType = DisplayItemType::TYPE_TRANSFORM;
if (nsLayoutUtils::HasEffectiveAnimation(frame, eCSSProperty_opacity) &&
if (nsLayoutUtils::HasEffectiveAnimation(
frame, nsCSSPropertyIDSet::OpacityProperties()) &&
!frame->IsTransformed()) {
itemType = DisplayItemType::TYPE_OPACITY;
}

View File

@ -6,9 +6,10 @@
#include shared,prim_shared,brush
varying vec3 vSrcUv;
varying vec3 vBackdropUv;
flat varying int vOp;
varying vec4 vSourceAndBackdropUv;
flat varying ivec4 vSourceUvBounds;
flat varying ivec4 vBackdropUvBounds;
flat varying ivec3 vOpAndLayers;
#ifdef WR_VERTEX_SHADER
@ -17,6 +18,10 @@ vec2 snap_device_pos(VertexInfo vi, float device_pixel_scale) {
return vi.world_pos.xy * device_pixel_scale / max(0.0, vi.world_pos.w) + vi.snap_offset;
}
ivec4 rect_to_ivec(RectWithSize rect) {
return ivec4(rect.p0, rect.p0 + rect.size - 1.0);
}
void brush_vs(
VertexInfo vi,
int prim_address,
@ -29,20 +34,25 @@ void brush_vs(
vec4 unused
) {
vec2 snapped_device_pos = snap_device_pos(vi, pic_task.device_pixel_scale);
vec2 texture_size = vec2(textureSize(sPrevPassColor, 0));
vOp = user_data.x;
PictureTask src_task = fetch_picture_task(user_data.z);
PictureTask backdrop_task = fetch_picture_task(user_data.y);
vec2 src_uv = snapped_device_pos +
src_task.common_data.task_rect.p0 -
src_task.content_origin;
vSrcUv = vec3(src_uv / texture_size, src_task.common_data.texture_layer_index);
RenderTaskCommonData backdrop_task = fetch_render_task_common_data(user_data.y);
vec2 backdrop_uv = snapped_device_pos +
backdrop_task.task_rect.p0 -
src_task.content_origin;
vBackdropUv = vec3(backdrop_uv / texture_size, backdrop_task.texture_layer_index);
backdrop_task.common_data.task_rect.p0 -
backdrop_task.content_origin;
vSourceAndBackdropUv = vec4(src_uv, backdrop_uv);
vSourceUvBounds = rect_to_ivec(src_task.common_data.task_rect);
vBackdropUvBounds = rect_to_ivec(backdrop_task.common_data.task_rect);
vOpAndLayers = ivec3(
user_data.x,
int(src_task.common_data.texture_layer_index),
int(backdrop_task.common_data.texture_layer_index)
);
}
#endif
@ -205,82 +215,89 @@ const int MixBlendMode_Color = 14;
const int MixBlendMode_Luminosity = 15;
Fragment brush_fs() {
vec4 Cb = textureLod(sPrevPassColor, vBackdropUv, 0.0);
vec4 Cs = textureLod(sPrevPassColor, vSrcUv, 0.0);
// The mix-blend-mode functions assume no premultiplied alpha
if (Cb.a != 0.0) {
Cb.rgb /= Cb.a;
}
if (Cs.a != 0.0) {
Cs.rgb /= Cs.a;
}
// Return yellow if none of the branches match (shouldn't happen).
vec4 result = vec4(1.0, 1.0, 0.0, 1.0);
switch (vOp) {
case MixBlendMode_Multiply:
result.rgb = Multiply(Cb.rgb, Cs.rgb);
break;
case MixBlendMode_Screen:
result.rgb = Screen(Cb.rgb, Cs.rgb);
break;
case MixBlendMode_Overlay:
// Overlay is inverse of Hardlight
result.rgb = HardLight(Cs.rgb, Cb.rgb);
break;
case MixBlendMode_Darken:
result.rgb = min(Cs.rgb, Cb.rgb);
break;
case MixBlendMode_Lighten:
result.rgb = max(Cs.rgb, Cb.rgb);
break;
case MixBlendMode_ColorDodge:
result.r = ColorDodge(Cb.r, Cs.r);
result.g = ColorDodge(Cb.g, Cs.g);
result.b = ColorDodge(Cb.b, Cs.b);
break;
case MixBlendMode_ColorBurn:
result.r = ColorBurn(Cb.r, Cs.r);
result.g = ColorBurn(Cb.g, Cs.g);
result.b = ColorBurn(Cb.b, Cs.b);
break;
case MixBlendMode_HardLight:
result.rgb = HardLight(Cb.rgb, Cs.rgb);
break;
case MixBlendMode_SoftLight:
result.r = SoftLight(Cb.r, Cs.r);
result.g = SoftLight(Cb.g, Cs.g);
result.b = SoftLight(Cb.b, Cs.b);
break;
case MixBlendMode_Difference:
result.rgb = Difference(Cb.rgb, Cs.rgb);
break;
case MixBlendMode_Exclusion:
result.rgb = Exclusion(Cb.rgb, Cs.rgb);
break;
case MixBlendMode_Hue:
result.rgb = Hue(Cb.rgb, Cs.rgb);
break;
case MixBlendMode_Saturation:
result.rgb = Saturation(Cb.rgb, Cs.rgb);
break;
case MixBlendMode_Color:
result.rgb = Color(Cb.rgb, Cs.rgb);
break;
case MixBlendMode_Luminosity:
result.rgb = Luminosity(Cb.rgb, Cs.rgb);
break;
default: break;
ivec2 source_uv = ivec2(floor(vSourceAndBackdropUv.xy));
vec4 Cs = source_uv == clamp(source_uv, vSourceUvBounds.xy, vSourceUvBounds.zw) ?
texelFetch(sPrevPassColor, ivec3(source_uv, vOpAndLayers.y), 0) :
vec4(0.0);
ivec2 backdrop_uv = ivec2(floor(vSourceAndBackdropUv.zw));
vec4 Cb = backdrop_uv == clamp(backdrop_uv, vBackdropUvBounds.xy, vBackdropUvBounds.zw) ?
texelFetch(sPrevPassColor, ivec3(backdrop_uv, vOpAndLayers.z), 0) :
vec4(0.0);
if (Cs.a == 0.0) {
result = Cb;
} else if (Cb.a == 0.0) {
result = Cs;
} else {
vec3 original_backdrop = Cb.rgb;
// The mix-blend-mode functions assume no premultiplied alpha
Cs.rgb /= Cs.a;
Cb.rgb /= Cb.a;
switch (vOpAndLayers.x) {
case MixBlendMode_Multiply:
result.rgb = Multiply(Cb.rgb, Cs.rgb);
break;
case MixBlendMode_Screen:
result.rgb = Screen(Cb.rgb, Cs.rgb);
break;
case MixBlendMode_Overlay:
// Overlay is inverse of Hardlight
result.rgb = HardLight(Cs.rgb, Cb.rgb);
break;
case MixBlendMode_Darken:
result.rgb = min(Cs.rgb, Cb.rgb);
break;
case MixBlendMode_Lighten:
result.rgb = max(Cs.rgb, Cb.rgb);
break;
case MixBlendMode_ColorDodge:
result.r = ColorDodge(Cb.r, Cs.r);
result.g = ColorDodge(Cb.g, Cs.g);
result.b = ColorDodge(Cb.b, Cs.b);
break;
case MixBlendMode_ColorBurn:
result.r = ColorBurn(Cb.r, Cs.r);
result.g = ColorBurn(Cb.g, Cs.g);
result.b = ColorBurn(Cb.b, Cs.b);
break;
case MixBlendMode_HardLight:
result.rgb = HardLight(Cb.rgb, Cs.rgb);
break;
case MixBlendMode_SoftLight:
result.r = SoftLight(Cb.r, Cs.r);
result.g = SoftLight(Cb.g, Cs.g);
result.b = SoftLight(Cb.b, Cs.b);
break;
case MixBlendMode_Difference:
result.rgb = Difference(Cb.rgb, Cs.rgb);
break;
case MixBlendMode_Exclusion:
result.rgb = Exclusion(Cb.rgb, Cs.rgb);
break;
case MixBlendMode_Hue:
result.rgb = Hue(Cb.rgb, Cs.rgb);
break;
case MixBlendMode_Saturation:
result.rgb = Saturation(Cb.rgb, Cs.rgb);
break;
case MixBlendMode_Color:
result.rgb = Color(Cb.rgb, Cs.rgb);
break;
case MixBlendMode_Luminosity:
result.rgb = Luminosity(Cb.rgb, Cs.rgb);
break;
default: break;
}
vec3 rgb = (1.0 - Cb.a) * Cs.rgb + Cb.a * result.rgb;
// simulate alpha-blending with the backdrop
result = mix(vec4(original_backdrop, Cb.a), vec4(rgb, 1.0), Cs.a);
}
result.rgb = (1.0 - Cb.a) * Cs.rgb + Cb.a * result.rgb;
result.a = Cs.a;
result.rgb *= result.a;
return Fragment(result);
}
#endif
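For reference, a small standalone sketch (Rust, scalar toy channels, not shader code) of what the rewritten brush_fs computes per fragment, assuming a multiply blend: out-of-bounds texels fetch as zero, both inputs are unpremultiplied before blending, and the result is mixed back against the premultiplied backdrop.
fn mix_blend_multiply(cs: [f32; 2], cb: [f32; 2]) -> [f32; 2] {
    // cs/cb = [premultiplied color, alpha] for source and backdrop;
    // a texel clamped outside its task rect reads as all zeroes.
    let ([cs_rgb, cs_a], [cb_rgb, cb_a]) = (cs, cb);
    if cs_a == 0.0 { return cb; }
    if cb_a == 0.0 { return cs; }
    let original_backdrop = cb_rgb;
    // The mix-blend-mode functions assume no premultiplied alpha.
    let (cs_c, cb_c) = (cs_rgb / cs_a, cb_rgb / cb_a);
    let blended = cb_c * cs_c; // Multiply(Cb, Cs)
    // Mix the blended color with the plain source where the backdrop is thin...
    let rgb = (1.0 - cb_a) * cs_c + cb_a * blended;
    // ...then simulate alpha-blending with the backdrop:
    // result = mix(vec4(original_backdrop, Cb.a), vec4(rgb, 1.0), Cs.a)
    [
        original_backdrop * (1.0 - cs_a) + rgb * cs_a,
        cb_a * (1.0 - cs_a) + cs_a,
    ]
}
fn main() {
    // Opaque grey source over an opaque white backdrop stays grey under multiply.
    println!("{:?}", mix_blend_multiply([0.5, 1.0], [1.0, 1.0]));
}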

View File

@ -29,7 +29,7 @@ use scene::FilterOpHelpers;
use smallvec::SmallVec;
use std::{f32, i32, usize};
use tiling::{RenderTargetContext};
use util::{project_rect, TransformedRectKind};
use util::{project_rect, MaxRect, TransformedRectKind};
// Special sentinel value recognized by the shader. It is considered to be
// a dummy task that doesn't mask out anything.
@ -979,10 +979,10 @@ impl AlphaBatchBuilder {
// Convert all children of the 3D hierarchy root into batches.
Picture3DContext::In { root_data: Some(ref list), .. } => {
for child in list {
let prim_instance = &picture.prim_list.prim_instances[child.anchor];
let prim_info = &ctx.scratch.prim_info[prim_instance.visibility_info.0 as usize];
let child_prim_instance = &picture.prim_list.prim_instances[child.anchor];
let child_prim_info = &ctx.scratch.prim_info[child_prim_instance.visibility_info.0 as usize];
let child_pic_index = match prim_instance.kind {
let child_pic_index = match child_prim_instance.kind {
PrimitiveInstanceKind::Picture { pic_index, .. } => pic_index,
PrimitiveInstanceKind::LineDecoration { .. } |
PrimitiveInstanceKind::TextRun { .. } |
@ -1003,14 +1003,14 @@ impl AlphaBatchBuilder {
// Get clip task, if set, for the picture primitive.
let clip_task_address = get_clip_task_address(
&ctx.scratch.clip_mask_instances,
prim_info.clip_task_index,
child_prim_info.clip_task_index,
0,
render_tasks,
).unwrap_or(OPAQUE_TASK_ADDRESS);
let prim_header = PrimitiveHeader {
let child_header = PrimitiveHeader {
local_rect: pic.local_rect,
local_clip_rect: prim_info.combined_local_clip_rect,
local_clip_rect: child_prim_info.combined_local_clip_rect,
task_address,
specific_prim_address: GpuCacheAddress::invalid(),
clip_task_address,
@ -1037,7 +1037,7 @@ impl AlphaBatchBuilder {
gpu_cache,
);
let prim_header_index = prim_headers.push(&prim_header, z_id, [
let prim_header_index = prim_headers.push(&child_header, z_id, [
uv_rect_address.as_int(),
if raster_config.establishes_raster_root { 1 } else { 0 },
0,
@ -1057,7 +1057,7 @@ impl AlphaBatchBuilder {
self.current_batch_list().push_single_instance(
key,
&prim_info.clip_chain.pic_clip_rect,
&child_prim_info.clip_chain.pic_clip_rect,
z_id,
PrimitiveInstanceData::from(instance),
);
@ -1127,7 +1127,7 @@ impl AlphaBatchBuilder {
// Get the local rect of the tile.
let tile_rect = tile.local_rect;
let prim_header = PrimitiveHeader {
let tile_header = PrimitiveHeader {
local_rect: tile_rect,
local_clip_rect,
task_address,
@ -1136,7 +1136,7 @@ impl AlphaBatchBuilder {
transform_id,
};
let prim_header_index = prim_headers.push(&prim_header, z_id, [
let prim_header_index = prim_headers.push(&tile_header, z_id, [
ShaderColorMode::Image as i32 | ((AlphaType::PremultipliedAlpha as i32) << 16),
RasterizationSpace::Local as i32,
get_shader_opacity(1.0),
@ -1452,28 +1452,71 @@ impl AlphaBatchBuilder {
}
}
}
PictureCompositeMode::MixBlend(mode) => {
let surface = ctx.surfaces[raster_config.surface_index.0]
PictureCompositeMode::Puppet { master: Some(source) } if ctx.is_picture_surface_visible(source) => return,
PictureCompositeMode::MixBlend { mode, backdrop } if ctx.is_picture_surface_visible(backdrop) => {
let backdrop_picture = &ctx.prim_store.pictures[backdrop.0];
let source_id = ctx
.surfaces[raster_config.surface_index.0]
.surface
.as_ref()
.expect("bug: surface must be allocated by now");
let cache_task_id = surface.resolve_render_task_id();
let backdrop_id = picture.secondary_render_task_id.expect("no backdrop!?");
.expect("bug: source surface must be allocated by now")
.resolve_render_task_id();
let backdrop_surface_id = backdrop_picture.raster_config
.as_ref()
.unwrap()
.surface_index;
let backdrop_id = ctx.surfaces[backdrop_surface_id.0]
.surface
.as_ref()
.expect("bug: backdrop surface must be allocated by now")
.resolve_render_task_id();
let key = BatchKey::new(
BatchKind::Brush(
BrushBatchKind::MixBlend {
task_id,
source_id: cache_task_id,
source_id,
backdrop_id,
},
),
BlendMode::PremultipliedAlpha,
non_segmented_blend_mode,
BatchTextures::no_texture(),
);
// The trick here is to draw the picture in the space of the backdrop,
// since the source can be attached to a child spatial node.
let expanded_header = PrimitiveHeader {
local_rect: ctx.clip_scroll_tree
.map_rect_to_parent_space(
prim_header.local_rect,
picture.spatial_node_index,
backdrop_picture.spatial_node_index,
&backdrop_picture.local_rect, //Note: this shouldn't be used
)
.unwrap_or_else(LayoutRect::zero)
.union(&backdrop_picture.local_rect),
local_clip_rect: ctx.clip_scroll_tree
.map_rect_to_parent_space(
prim_header.local_clip_rect,
picture.spatial_node_index,
backdrop_picture.spatial_node_index,
&backdrop_picture.local_clip_rect, //Note: this shouldn't be used
)
.unwrap_or_else(LayoutRect::zero)
.union(&backdrop_picture.local_clip_rect),
transform_id: transforms
.get_id(
backdrop_picture.spatial_node_index,
root_spatial_node_index,
ctx.clip_scroll_tree,
),
..prim_header
};
let backdrop_task_address = render_tasks.get_task_address(backdrop_id);
let source_task_address = render_tasks.get_task_address(cache_task_id);
let prim_header_index = prim_headers.push(&prim_header, z_id, [
let source_task_address = render_tasks.get_task_address(source_id);
let prim_header_index = prim_headers.push(&expanded_header, z_id, [
mode as u32 as i32,
backdrop_task_address.0 as i32,
source_task_address.0 as i32,
@ -1487,14 +1530,20 @@ impl AlphaBatchBuilder {
brush_flags,
user_data: 0,
};
//TODO: investigate if we can do better. We can't use the `bounding_rect`
// here because we effectively merge the call with the backdrop,
// and the instance for the backdrop isn't available here.
let conservative_bounding_rect = PictureRect::max_rect();
self.current_batch_list().push_single_instance(
key,
bounding_rect,
&conservative_bounding_rect,
z_id,
PrimitiveInstanceData::from(instance),
);
}
PictureCompositeMode::Puppet { .. } |
PictureCompositeMode::MixBlend { .. } |
PictureCompositeMode::Blit(_) => {
let surface = ctx.surfaces[raster_config.surface_index.0]
.surface

View File

@ -11,7 +11,7 @@ use print_tree::{PrintableTree, PrintTree, PrintTreePrinter};
use scene::SceneProperties;
use spatial_node::{ScrollFrameInfo, SpatialNode, SpatialNodeType, StickyFrameInfo, ScrollFrameKind};
use std::ops;
use util::{LayoutToWorldFastTransform, MatrixHelpers, ScaleOffset};
use util::{project_rect, LayoutToWorldFastTransform, MatrixHelpers, ScaleOffset};
pub type ScrollStates = FastHashMap<ExternalScrollId, ScrollFrameInfo>;
@ -211,6 +211,35 @@ impl ClipScrollTree {
})
}
/// Map a rectangle in some child space to a parent.
/// Doesn't handle preserve-3d islands.
pub fn map_rect_to_parent_space(
&self,
mut rect: LayoutRect,
child_index: SpatialNodeIndex,
parent_index: SpatialNodeIndex,
parent_bounds: &LayoutRect,
) -> Option<LayoutRect> {
if child_index == parent_index {
return Some(rect);
}
assert!(child_index.0 > parent_index.0);
let child = &self.spatial_nodes[child_index.0 as usize];
let parent = &self.spatial_nodes[parent_index.0 as usize];
let mut coordinate_system_id = child.coordinate_system_id;
rect = child.coordinate_system_relative_scale_offset.map_rect(&rect);
while coordinate_system_id != parent.coordinate_system_id {
let coord_system = &self.coord_systems[coordinate_system_id.0 as usize];
coordinate_system_id = coord_system.parent.expect("invalid parent!");
rect = project_rect(&coord_system.transform, &rect, parent_bounds)?;
}
Some(parent.coordinate_system_relative_scale_offset.unmap_rect(&rect))
}
/// Returns true if the spatial node is the same as the parent, or is
/// a child of the parent.
pub fn is_same_or_child_of(

View File

@ -22,7 +22,7 @@ use image::simplify_repeated_primitive;
use intern::{Handle, Internable, InternDebug};
use internal_types::{FastHashMap, FastHashSet};
use picture::{Picture3DContext, PictureCompositeMode, PicturePrimitive, PictureOptions};
use picture::{BlitReason, PrimitiveList, TileCache};
use picture::{BlitReason, OrderedPictureChild, PrimitiveList, TileCache};
use prim_store::{PrimitiveInstance, PrimitiveKeyKind, PrimitiveSceneData};
use prim_store::{PrimitiveInstanceKind, NinePatchDescriptor, PrimitiveStore};
use prim_store::{PrimitiveStoreStats, ScrollNodeAndClipChain, PictureIndex};
@ -1258,23 +1258,44 @@ impl<'a> DisplayListFlattener<'a> {
None
};
// Get the transform-style of the parent stacking context,
// Figure out if the parent is in 3D context,
// which determines if we *might* need to draw this on
// an intermediate surface for plane splitting purposes.
let (parent_is_3d, extra_3d_instance) = match self.sc_stack.last_mut() {
Some(sc) => {
// Cut the sequence of flat children before starting a child stacking context,
// so that the relative order between them and our current SC is preserved.
let extra_instance = sc.cut_flat_item_sequence(
let (parent_is_3d, extra_3d_picture, backdrop_picture) = match self.sc_stack.last_mut() {
Some(ref mut sc) if composite_ops.mix_blend_mode.is_some() => {
// Cut the sequence of children before starting a mix-blend stacking context,
// so that we have a source picture for applying the blending operator.
let backdrop_picture = sc.cut_item_sequence(
&mut self.prim_store,
&mut self.interners,
PictureCompositeMode::Puppet { master: None },
Picture3DContext::Out,
);
(sc.is_3d(), extra_instance)
},
None => (false, None),
(false, None, backdrop_picture)
}
Some(ref mut sc) if sc.is_3d() => {
let flat_items_context_3d = match sc.context_3d {
Picture3DContext::In { ancestor_index, .. } => Picture3DContext::In {
root_data: None,
ancestor_index,
},
Picture3DContext::Out => panic!("Unexpected out of 3D context"),
};
// Cut the sequence of flat children before starting a child stacking context,
// so that the relative order between them and our current SC is preserved.
let extra_picture = sc.cut_item_sequence(
&mut self.prim_store,
&mut self.interners,
PictureCompositeMode::Blit(BlitReason::PRESERVE3D),
flat_items_context_3d,
);
(true, extra_picture, None)
}
Some(_) | None => (false, None, None),
};
if let Some(instance) = extra_3d_instance {
if let Some((_picture_index, instance)) = extra_3d_picture {
self.add_primitive_instance_to_3d_root(instance);
}
@ -1312,11 +1333,13 @@ impl<'a> DisplayListFlattener<'a> {
// has a clip node. In the future, we may decide during
// prepare step to skip the intermediate surface if the
// clip node doesn't affect the stacking context rect.
let blit_reason = if clip_chain_id == ClipChainId::NONE {
BlitReason::empty()
} else {
BlitReason::CLIP
};
let mut blit_reason = BlitReason::empty();
if clip_chain_id != ClipChainId::NONE {
blit_reason |= BlitReason::CLIP
}
if participating_in_3d_context {
blit_reason |= BlitReason::PRESERVE3D;
}
// Push the SC onto the stack, so we know how to handle things in
// pop_stacking_context.
@ -1329,6 +1352,7 @@ impl<'a> DisplayListFlattener<'a> {
clip_chain_id,
frame_output_pipeline_id,
composite_ops,
backdrop_picture,
blit_reason,
transform_style,
context_3d,
@ -1348,26 +1372,22 @@ impl<'a> DisplayListFlattener<'a> {
// (b) It's useful for the initial version of picture caching in gecko, by enabling
// us to just look for interesting scroll roots on the root stacking context,
// without having to consider cuts at stacking context boundaries.
let parent_is_empty = match self.sc_stack.last_mut() {
Some(parent_sc) => {
if stacking_context.is_redundant(
parent_sc,
self.clip_scroll_tree,
) {
// If the parent context primitives list is empty, it's faster
// to assign the storage of the popped context instead of paying
// the copying cost for extend.
if parent_sc.primitives.is_empty() {
parent_sc.primitives = stacking_context.primitives;
} else {
parent_sc.primitives.extend(stacking_context.primitives);
}
return;
if let Some(parent_sc) = self.sc_stack.last_mut() {
if stacking_context.is_redundant(
parent_sc,
self.clip_scroll_tree,
) {
// If the parent context primitives list is empty, it's faster
// to assign the storage of the popped context instead of paying
// the copying cost for extend.
if parent_sc.primitives.is_empty() {
parent_sc.primitives = stacking_context.primitives;
} else {
parent_sc.primitives.extend(stacking_context.primitives);
}
parent_sc.primitives.is_empty()
},
None => true,
};
return;
}
}
if stacking_context.create_tile_cache {
self.setup_picture_caching(
@ -1383,7 +1403,16 @@ impl<'a> DisplayListFlattener<'a> {
// to correctly handle some CSS cases (see #1957).
let max_clip = LayoutRect::max_rect();
let (leaf_context_3d, leaf_composite_mode, leaf_output_pipeline_id) = match stacking_context.context_3d {
let leaf_composite_mode = if stacking_context.blit_reason.is_empty() {
// By default, this picture will be collapsed into
// the owning target.
None
} else {
// Add a dummy composite filter if the SC has to be isolated.
Some(PictureCompositeMode::Blit(stacking_context.blit_reason))
};
let leaf_context_3d = match stacking_context.context_3d {
// TODO(gw): For now, as soon as this picture is in
// a 3D context, we draw it to an intermediate
// surface and apply plane splitting. However,
@ -1391,25 +1420,17 @@ impl<'a> DisplayListFlattener<'a> {
// During culling, we can check if there is actually
// perspective present, and skip the plane splitting
// completely when that is not the case.
Picture3DContext::In { ancestor_index, .. } => (
Picture3DContext::In { root_data: None, ancestor_index },
Some(PictureCompositeMode::Blit(BlitReason::PRESERVE3D | stacking_context.blit_reason)),
None,
),
Picture3DContext::Out => (
Picture3DContext::Out,
if stacking_context.blit_reason.is_empty() {
// By default, this picture will be collapsed into
// the owning target.
None
} else {
// Add a dummy composite filter if the SC has to be isolated.
Some(PictureCompositeMode::Blit(stacking_context.blit_reason))
},
stacking_context.frame_output_pipeline_id
),
Picture3DContext::In { ancestor_index, .. } => {
assert_ne!(leaf_composite_mode, None);
Picture3DContext::In { root_data: None, ancestor_index }
}
Picture3DContext::Out => Picture3DContext::Out,
};
let leaf_prim_list = PrimitiveList::new(
stacking_context.primitives,
&self.interners,
);
// Add picture for this actual stacking context contents to render into.
let leaf_pic_index = PictureIndex(self.prim_store.pictures
.alloc()
@ -1417,13 +1438,10 @@ impl<'a> DisplayListFlattener<'a> {
leaf_composite_mode,
leaf_context_3d,
stacking_context.pipeline_id,
leaf_output_pipeline_id,
stacking_context.frame_output_pipeline_id,
true,
stacking_context.requested_raster_space,
PrimitiveList::new(
stacking_context.primitives,
&self.interners,
),
leaf_prim_list,
stacking_context.spatial_node_index,
max_clip,
None,
@ -1532,8 +1550,7 @@ impl<'a> DisplayListFlattener<'a> {
self.prim_store.optimize_picture_if_possible(current_pic_index);
}
// Same for mix-blend-mode, except we can skip if this primitive is the first in the parent
// stacking context.
// Same for mix-blend-mode, except we can skip if the backdrop doesn't have any primitives.
// From https://drafts.fxtf.org/compositing-1/#generalformula, the formula for blending is:
// Cs = (1 - ab) x Cs + ab x Blend(Cb, Cs)
// where
@ -1544,8 +1561,15 @@ impl<'a> DisplayListFlattener<'a> {
// If we're the first primitive within a stacking context, then we can guarantee that the
// backdrop alpha will be 0, and then the blend equation collapses to just
// Cs = Cs, and the blend mode isn't taken into account at all.
let has_mix_blend = if let (Some(mix_blend_mode), false) = (stacking_context.composite_ops.mix_blend_mode, parent_is_empty) {
let composite_mode = Some(PictureCompositeMode::MixBlend(mix_blend_mode));
if let (Some(mode), Some((backdrop, backdrop_instance))) = (stacking_context.composite_ops.mix_blend_mode, stacking_context.backdrop_picture.take()) {
let composite_mode = Some(PictureCompositeMode::MixBlend { mode, backdrop });
// We need the backdrop picture to be at the same level as the content,
// so that it is available as a source for composition...
if let Some(parent_sc) = self.sc_stack.last_mut() {
// Not actually rendered, due to `PictureCompositeMode::Puppet`, unless the blend picture is culled.
parent_sc.primitives.push(backdrop_instance);
}
let blend_pic_index = PictureIndex(self.prim_store.pictures
.alloc()
@ -1567,9 +1591,14 @@ impl<'a> DisplayListFlattener<'a> {
))
);
// Associate the backdrop picture with the blend.
self.prim_store.pictures[backdrop.0].requested_composite_mode = Some(PictureCompositeMode::Puppet {
master: Some(blend_pic_index),
});
current_pic_index = blend_pic_index;
cur_instance = create_prim_instance(
blend_pic_index,
current_pic_index,
composite_mode.into(),
stacking_context.is_backface_visible,
ClipChainId::NONE,
@ -1578,12 +1607,9 @@ impl<'a> DisplayListFlattener<'a> {
);
if cur_instance.is_chased() {
println!("\tis a mix-blend picture for a stacking context with {:?}", mix_blend_mode);
println!("\tis a mix-blend picture for a stacking context with {:?}", mode);
}
true
} else {
false
};
}
// Set the stacking context clip on the outermost picture in the chain,
// unless we already set it on the leaf picture.
@ -1598,13 +1624,6 @@ impl<'a> DisplayListFlattener<'a> {
}
// Regular parenting path
Some(ref mut parent_sc) => {
// If we have a mix-blend-mode, the stacking context needs to be isolated
// to blend correctly as per the CSS spec.
// If not already isolated for some other reason,
// make this picture as isolated.
if has_mix_blend {
parent_sc.blit_reason |= BlitReason::ISOLATE;
}
parent_sc.primitives.push(cur_instance);
None
}
@ -2631,6 +2650,9 @@ struct FlattenedStackingContext {
/// stacking context.
composite_ops: CompositeOps,
/// For a mix-blend stacking context, specify the picture index for backdrop.
backdrop_picture: Option<(PictureIndex, PrimitiveInstance)>,
/// Bitfield of reasons this stacking context needs to
/// be an offscreen surface.
blit_reason: BlitReason,
@ -2673,7 +2695,8 @@ impl FlattenedStackingContext {
// We can skip mix-blend modes if they are the first primitive in a stacking context,
// see pop_stacking_context for a full explanation.
if !self.composite_ops.mix_blend_mode.is_none() &&
!parent.primitives.is_empty() {
!self.backdrop_picture.is_none()
{
return false;
}
@ -2711,29 +2734,24 @@ impl FlattenedStackingContext {
true
}
/// For a Preserve3D context, cut the sequence of the immediate flat children
/// Cut the sequence of the immediate children of a stacking context
/// recorded so far and generate a picture from them.
pub fn cut_flat_item_sequence(
fn cut_item_sequence(
&mut self,
prim_store: &mut PrimitiveStore,
interners: &mut Interners,
) -> Option<PrimitiveInstance> {
if !self.is_3d() || self.primitives.is_empty() {
composite_mode: PictureCompositeMode,
context_3d: Picture3DContext<OrderedPictureChild>,
) -> Option<(PictureIndex, PrimitiveInstance)> {
if self.primitives.is_empty() {
return None
}
let flat_items_context_3d = match self.context_3d {
Picture3DContext::In { ancestor_index, .. } => Picture3DContext::In {
root_data: None,
ancestor_index,
},
Picture3DContext::Out => panic!("Unexpected out of 3D context"),
};
let pic_index = PictureIndex(prim_store.pictures
.alloc()
.init(PicturePrimitive::new_image(
Some(PictureCompositeMode::Blit(BlitReason::PRESERVE3D)),
flat_items_context_3d,
Some(composite_mode),
context_3d,
self.pipeline_id,
None,
true,
@ -2758,7 +2776,7 @@ impl FlattenedStackingContext {
interners,
);
Some(prim_instance)
Some((pic_index, prim_instance))
}
}

View File

@ -1870,12 +1870,10 @@ bitflags! {
/// A set of flags describing why a picture may need a backing surface.
#[cfg_attr(feature = "capture", derive(Serialize))]
pub struct BlitReason: u32 {
/// Mix-blend-mode on a child that requires isolation.
const ISOLATE = 1;
/// Clip node that _might_ require a surface.
const CLIP = 2;
const CLIP = 1;
/// Preserve-3D requires a surface for plane-splitting.
const PRESERVE3D = 4;
const PRESERVE3D = 2;
}
}
@ -1885,8 +1883,20 @@ bitflags! {
#[derive(Debug, Copy, Clone, PartialEq)]
#[cfg_attr(feature = "capture", derive(Serialize))]
pub enum PictureCompositeMode {
/// Don't composite this picture in the standard way; it can be used for
/// pictures that need to be isolated but composited manually,
/// e.g. for the backdrop of mix-blend pictures.
Puppet {
/// The master picture that actually handles compositing
/// of this one. If that picture turns out to be invisible,
/// the puppet mode becomes a regular blit.
master: Option<PictureIndex>,
},
/// Apply CSS mix-blend-mode effect.
MixBlend(MixBlendMode),
MixBlend {
mode: MixBlendMode,
backdrop: PictureIndex,
},
/// Apply a CSS filter.
Filter(FilterOp),
/// Draw to intermediate surface, copy straight across. This
@ -3060,37 +3070,6 @@ impl PicturePrimitive {
PictureSurface::RenderTask(render_task_id)
}
PictureCompositeMode::MixBlend(..) => {
let uv_rect_kind = calculate_uv_rect_kind(
&pic_rect,
&transform,
&clipped,
device_pixel_scale,
true,
);
let picture_task = RenderTask::new_picture(
RenderTaskLocation::Dynamic(None, clipped.size),
unclipped.size,
pic_index,
clipped.origin,
child_tasks,
uv_rect_kind,
pic_context.raster_spatial_node_index,
device_pixel_scale,
);
let readback_task_id = frame_state.render_tasks.add(
RenderTask::new_readback(clipped)
);
self.secondary_render_task_id = Some(readback_task_id);
surfaces[surface_index.0].tasks.push(readback_task_id);
let render_task_id = frame_state.render_tasks.add(picture_task);
surfaces[surface_index.0].tasks.push(render_task_id);
PictureSurface::RenderTask(render_task_id)
}
PictureCompositeMode::Filter(filter) => {
if let FilterOp::ColorMatrix(m) = filter {
if let Some(mut request) = frame_state.gpu_cache.request(&mut self.extra_gpu_data_handle) {
@ -3123,11 +3102,13 @@ impl PicturePrimitive {
surfaces[surface_index.0].tasks.push(render_task_id);
PictureSurface::RenderTask(render_task_id)
}
PictureCompositeMode::Puppet { .. } |
PictureCompositeMode::MixBlend { .. } |
PictureCompositeMode::Blit(_) => {
// The SplitComposite shader used for 3d contexts doesn't snap
// to pixels, so we shouldn't snap our uv coordinates either.
let supports_snapping = match self.context_3d {
Picture3DContext::In{ .. } => false,
Picture3DContext::In { .. } => false,
_ => true,
};
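A hedged sketch (simplified stand-in types, not the actual WebRender definitions) of how the reworked composite modes interact during batching: the backdrop becomes a Puppet tied to its master blend picture, the blend picture records its backdrop index, and a Puppet whose master surface is visible is skipped because the master composites it.
#[derive(Clone, Copy)]
struct PictureIndex(usize);
enum PictureCompositeMode {
    // Isolated but composited manually by `master` (the mix-blend picture).
    Puppet { master: Option<PictureIndex> },
    // Blends against the surface of the `backdrop` picture.
    MixBlend { backdrop: PictureIndex },
    // Plain copy of the picture's own surface.
    Blit,
}
// Mirrors the shape of the new match arms in batch.rs, with the blend mode
// and other details elided.
fn batch_action(mode: &PictureCompositeMode, surface_visible: &dyn Fn(PictureIndex) -> bool) -> &'static str {
    match *mode {
        PictureCompositeMode::Puppet { master: Some(m) } if surface_visible(m) =>
            "skip: the master mix-blend picture reads this surface",
        PictureCompositeMode::MixBlend { backdrop } if surface_visible(backdrop) =>
            "emit a MixBlend batch reading the source and backdrop surfaces",
        _ =>
            "fall through: draw as a regular blit of the picture surface",
    }
}
fn main() {
    let backdrop = PictureIndex(0);
    let always_visible = |_: PictureIndex| true;
    println!("{}", batch_action(&PictureCompositeMode::MixBlend { backdrop }, &always_visible));
    println!("{}", batch_action(&PictureCompositeMode::Puppet { master: Some(backdrop) }, &always_visible));
    println!("{}", batch_action(&PictureCompositeMode::Blit, &always_visible));
}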

View File

@ -62,7 +62,7 @@ pub enum PictureCompositeKey {
impl From<Option<PictureCompositeMode>> for PictureCompositeKey {
fn from(mode: Option<PictureCompositeMode>) -> Self {
match mode {
Some(PictureCompositeMode::MixBlend(mode)) => {
Some(PictureCompositeMode::MixBlend { mode, .. }) => {
match mode {
MixBlendMode::Normal => PictureCompositeKey::Identity,
MixBlendMode::Multiply => PictureCompositeKey::Multiply,
@ -117,6 +117,7 @@ impl From<Option<PictureCompositeMode>> for PictureCompositeKey {
}
}
}
Some(PictureCompositeMode::Puppet { .. }) |
Some(PictureCompositeMode::Blit(_)) |
Some(PictureCompositeMode::TileCache { .. }) |
None => {

View File

@ -407,7 +407,6 @@ pub enum RenderTaskKind {
HorizontalBlur(BlurTask),
#[allow(dead_code)]
Glyph(GlyphTask),
Readback(DeviceIntRect),
Scaling(ScalingTask),
Blit(BlitTask),
Border(BorderTask),
@ -496,15 +495,6 @@ impl RenderTask {
}
}
pub fn new_readback(screen_rect: DeviceIntRect) -> Self {
RenderTask::with_dynamic_location(
screen_rect.size,
Vec::new(),
RenderTaskKind::Readback(screen_rect),
ClearMode::Transparent,
)
}
pub fn new_blit(
size: DeviceIntSize,
source: BlitSource,
@ -854,8 +844,7 @@ impl RenderTask {
fn uv_rect_kind(&self) -> UvRectKind {
match self.kind {
RenderTaskKind::CacheMask(..) |
RenderTaskKind::Readback(..) => {
RenderTaskKind::CacheMask(..) => {
unreachable!("bug: unexpected render task");
}
@ -928,7 +917,6 @@ impl RenderTask {
RenderTaskKind::Glyph(_) => {
[0.0, 1.0, 0.0]
}
RenderTaskKind::Readback(..) |
RenderTaskKind::Scaling(..) |
RenderTaskKind::Border(..) |
RenderTaskKind::LineDecoration(..) |
@ -969,7 +957,6 @@ impl RenderTask {
gpu_cache.get_address(&info.uv_rect_handle)
}
RenderTaskKind::ClipRegion(..) |
RenderTaskKind::Readback(..) |
RenderTaskKind::Scaling(..) |
RenderTaskKind::Blit(..) |
RenderTaskKind::Border(..) |
@ -1022,8 +1009,6 @@ impl RenderTask {
pub fn target_kind(&self) -> RenderTargetKind {
match self.kind {
RenderTaskKind::Readback(..) => RenderTargetKind::Color,
RenderTaskKind::LineDecoration(..) => RenderTargetKind::Color,
RenderTaskKind::ClipRegion(..) |
@ -1076,7 +1061,6 @@ impl RenderTask {
RenderTaskKind::Picture(ref mut info) => {
(&mut info.uv_rect_handle, info.uv_rect_kind)
}
RenderTaskKind::Readback(..) |
RenderTaskKind::Scaling(..) |
RenderTaskKind::Blit(..) |
RenderTaskKind::ClipRegion(..) |
@ -1127,10 +1111,6 @@ impl RenderTask {
pt.new_level("HorizontalBlur".to_owned());
task.print_with(pt);
}
RenderTaskKind::Readback(ref rect) => {
pt.new_level("Readback".to_owned());
pt.add_item(format!("rect: {:?}", rect));
}
RenderTaskKind::Scaling(ref kind) => {
pt.new_level("Scaling".to_owned());
pt.add_item(format!("kind: {:?}", kind));

View File

@ -70,7 +70,7 @@ use render_backend::{FrameId, RenderBackend};
use scene_builder::{SceneBuilder, LowPrioritySceneBuilder};
use shade::{Shaders, WrShaders};
use smallvec::SmallVec;
use render_task::{RenderTask, RenderTaskKind, RenderTaskTree};
use render_task::RenderTaskTree;
use resource_cache::ResourceCache;
use util::drain_filter;
@ -2311,11 +2311,6 @@ impl Renderer {
"Scalings",
target.scalings.len(),
);
debug_target.add(
debug_server::BatchKind::Cache,
"Readbacks",
target.readbacks.len(),
);
debug_target.add(
debug_server::BatchKind::Cache,
"Vertical Blur",
@ -3027,74 +3022,6 @@ impl Renderer {
self.profile_counters.vertices.add(6 * data.len());
}
fn handle_readback_composite(
&mut self,
draw_target: DrawTarget,
uses_scissor: bool,
source: &RenderTask,
backdrop: &RenderTask,
readback: &RenderTask,
) {
if uses_scissor {
self.device.disable_scissor();
}
let cache_texture = self.texture_resolver
.resolve(&TextureSource::PrevPassColor)
.unwrap();
// Before submitting the composite batch, do the
// framebuffer readbacks that are needed for each
// composite operation in this batch.
let (readback_rect, readback_layer) = readback.get_target_rect();
let (backdrop_rect, _) = backdrop.get_target_rect();
let backdrop_screen_origin = match backdrop.kind {
RenderTaskKind::Picture(ref task_info) => task_info.content_origin,
_ => panic!("bug: composite on non-picture?"),
};
let source_screen_origin = match source.kind {
RenderTaskKind::Picture(ref task_info) => task_info.content_origin,
_ => panic!("bug: composite on non-picture?"),
};
// Bind the FBO to blit the backdrop to.
// Called per-instance in case the layer (and therefore FBO)
// changes. The device will skip the GL call if the requested
// target is already bound.
let cache_draw_target = DrawTarget::Texture {
texture: cache_texture,
layer: readback_layer.0 as usize,
with_depth: false,
};
self.device.bind_draw_target(cache_draw_target);
let mut src = DeviceIntRect::new(
source_screen_origin + (backdrop_rect.origin - backdrop_screen_origin),
readback_rect.size,
);
let mut dest = readback_rect.to_i32();
// Need to invert the y coordinates and flip the image vertically when
// reading back from the framebuffer.
if draw_target.is_default() {
src.origin.y = draw_target.dimensions().height as i32 - src.size.height - src.origin.y;
dest.origin.y += dest.size.height;
dest.size.height = -dest.size.height;
}
self.device.bind_read_target(draw_target.into());
self.device.blit_render_target(src, dest, TextureFilter::Linear);
// Restore draw target to current pass render target + layer, and reset
// the read target.
self.device.bind_draw_target(draw_target);
self.device.reset_read_target();
if uses_scissor {
self.device.enable_scissor();
}
}
fn handle_blits(
&mut self,
blits: &[BlitJob],
@ -3420,20 +3347,6 @@ impl Renderer {
prev_blend_mode = batch.key.blend_mode;
}
// Handle special case readback for composites.
if let BatchKind::Brush(BrushBatchKind::MixBlend { task_id, source_id, backdrop_id }) = batch.key.kind {
// composites can't be grouped together because
// they may overlap and affect each other.
debug_assert_eq!(batch.instances.len(), 1);
self.handle_readback_composite(
draw_target,
uses_scissor,
&render_tasks[source_id],
&render_tasks[task_id],
&render_tasks[backdrop_id],
);
}
let _timer = self.gpu_profile.start_timer(batch.key.kind.sampler_tag());
iterate_regions(

View File

@ -20,7 +20,7 @@ use internal_types::{CacheTextureId, FastHashMap, SavedTargetIndex, TextureSourc
#[cfg(feature = "pathfinder")]
use pathfinder_partitioner::mesh::Mesh;
use picture::{RecordedDirtyRegion, SurfaceInfo};
use prim_store::{PrimitiveStore, DeferredResolve, PrimitiveScratchBuffer};
use prim_store::{PictureIndex, PrimitiveStore, DeferredResolve, PrimitiveScratchBuffer};
use profiler::FrameProfileCounters;
use render_backend::{DataStores, FrameId};
use render_task::{BlitSource, RenderTaskAddress, RenderTaskId, RenderTaskKind};
@ -62,6 +62,18 @@ pub struct RenderTargetContext<'a, 'rc> {
pub globals: &'a FrameGlobalResources,
}
impl<'a, 'rc> RenderTargetContext<'a, 'rc> {
/// Returns true if a picture has a surface that is visible.
pub fn is_picture_surface_visible(&self, index: PictureIndex) -> bool {
match self.prim_store.pictures[index.0].raster_config {
Some(ref raster_config) => {
self.surfaces[raster_config.surface_index.0].surface.is_some()
}
None => false,
}
}
}
/// Represents a number of rendering operations on a surface.
///
/// In graphics parlance, a "render target" usually means "a surface (texture or
@ -348,7 +360,6 @@ pub struct ColorRenderTarget {
// List of blur operations to apply for this render target.
pub vertical_blurs: Vec<BlurInstance>,
pub horizontal_blurs: Vec<BlurInstance>,
pub readbacks: Vec<DeviceIntRect>,
pub scalings: Vec<ScalingInstance>,
pub blits: Vec<BlitJob>,
// List of frame buffer outputs for this render target.
@ -370,7 +381,6 @@ impl RenderTarget for ColorRenderTarget {
alpha_batch_containers: Vec::new(),
vertical_blurs: Vec::new(),
horizontal_blurs: Vec::new(),
readbacks: Vec::new(),
scalings: Vec::new(),
blits: Vec::new(),
outputs: Vec::new(),
@ -502,9 +512,6 @@ impl RenderTarget for ColorRenderTarget {
// FIXME(pcwalton): Support color glyphs.
panic!("Glyphs should not be added to color target!");
}
RenderTaskKind::Readback(device_rect) => {
self.readbacks.push(device_rect);
}
RenderTaskKind::Scaling(..) => {
self.scalings.push(ScalingInstance {
task_address: render_tasks.get_task_address(task_id),
@ -637,7 +644,6 @@ impl RenderTarget for AlphaRenderTarget {
}
match task.kind {
RenderTaskKind::Readback(..) |
RenderTaskKind::Picture(..) |
RenderTaskKind::Blit(..) |
RenderTaskKind::Border(..) |
@ -819,7 +825,6 @@ impl TextureCacheRenderTarget {
RenderTaskKind::Picture(..) |
RenderTaskKind::ClipRegion(..) |
RenderTaskKind::CacheMask(..) |
RenderTaskKind::Readback(..) |
RenderTaskKind::Scaling(..) => {
panic!("BUG: unexpected task kind for texture cache target");
}

View File

@ -1,13 +1,14 @@
# test that we handle the backdrop being smaller than the source
---
root:
items:
- type: rect
bounds: [0, 0, 100, 100]
color: [0, 255, 0]
bounds: [25, 25, 50, 50]
color: green
- type: stacking-context
bounds: [0, 0, 100, 100]
mix-blend-mode: multiply
items:
- type: rect
bounds: [0, 0, 100, 100]
color: [255, 128, 0]
color: green

View File

@ -8,10 +8,6 @@ root:
bounds: [0, 0, 100, 100]
mix-blend-mode: multiply
items:
- type: stacking-context
- type: rect
bounds: [0, 0, 100, 100]
mix-blend-mode: multiply
items:
- type: rect
bounds: [0, 0, 100, 100]
color: [255, 128, 0]
color: [255, 128, 0]

View File

@ -0,0 +1,17 @@
---
root:
items:
- type: rect
bounds: [0, 0, 100, 100]
color: [0, 255, 0]
- type: stacking-context
bounds: [0, 0, 100, 100]
mix-blend-mode: multiply
items:
- type: stacking-context
bounds: [0, 0, 100, 100]
mix-blend-mode: multiply
items:
- type: rect
bounds: [0, 0, 100, 100]
color: [255, 128, 0]

View File

@ -1,6 +1,7 @@
== multiply.yaml multiply-ref.yaml
== multiply-2.yaml multiply-2-ref.yaml
== color_targets(3) alpha_targets(0) multiply-3.yaml multiply-2-ref.yaml
== multiply-2.yaml multiply-ref.yaml
== multiply-3.yaml multiply-3-ref.yaml
== color_targets(2) alpha_targets(0) multiply-4.yaml multiply-3-ref.yaml
== difference.yaml difference-ref.yaml
fuzzy(1,10000) == difference-transparent.yaml difference-transparent-ref.yaml
== darken.yaml darken-ref.yaml
@ -22,3 +23,4 @@ fuzzy(1,2502) == transparent-composite-1.yaml transparent-composite-1-ref.yaml
fuzzy(1,2502) == transparent-composite-2.yaml transparent-composite-2-ref.yaml
== multi-mix-blend-mode.yaml multi-mix-blend-mode-ref.yaml
fuzzy(50,5) == transform-source.yaml transform-source-ref.yaml

View File

@ -0,0 +1,13 @@
---
root:
items:
- type: stacking-context
bounds: [0, 0, 0, 0]
transform: rotate-z(60)
items:
- type: rect
bounds: [25, -100, 150, 150]
color: blue
- type: rect
bounds: [25, 25, 100, 100]
color: black

View File

@ -0,0 +1,15 @@
# test that we handle the source stacking context being transformed
---
root:
items:
- type: rect
bounds: [25, 25, 100, 100]
color: green
- type: stacking-context
bounds: [0, 0, 0, 0]
mix-blend-mode: multiply
transform: rotate-z(60)
items:
- type: rect
bounds: [25, -100, 150, 150]
color: blue

View File

@ -533,9 +533,10 @@ fn render<'a>(
subargs: &clap::ArgMatches<'a>,
) {
let input_path = subargs.value_of("INPUT").map(PathBuf::from).unwrap();
let mut show_stats = false;
// If the input is a directory, we are looking at a capture.
let mut thing = if input_path.as_path().is_dir() {
let mut thing = if input_path.is_dir() {
let mut documents = wrench.api.load_capture(input_path);
println!("loaded {:?}", documents.iter().map(|cd| cd.document_id).collect::<Vec<_>>());
let captured = documents.swap_remove(0);
@ -548,6 +549,7 @@ fn render<'a>(
.expect("Tried to render with an unknown file type.")
.to_str()
.expect("Tried to render with an unknown file type.");
show_stats = true; // show when invoked on single files
match extension {
"yaml" => Box::new(YamlFrameReader::new_from_args(subargs)) as Box<WrenchThing>,
@ -747,9 +749,14 @@ fn render<'a>(
wrench.show_onscreen_help();
}
wrench.render();
let results = wrench.render();
window.swap_buffers();
if show_stats {
show_stats = false;
println!("{:#?}", results.stats);
}
if do_loop {
thing.next_frame();
}

View File

@ -1083,6 +1083,9 @@ static void DoApplyRenderingChangeToTree(nsIFrame* aFrame,
}
if ((aChange & nsChangeHint_UpdateTransformLayer) &&
aFrame->IsTransformed()) {
// Note: All the transform-like properties should map to the same
// layer activity index, as does the restyle count. Therefore, using
// eCSSProperty_transform should be fine.
ActiveLayerTracker::NotifyRestyle(aFrame, eCSSProperty_transform);
// If we're not already going to do an invalidating paint, see
// if we can get away with only updating the transform on a
@ -1107,6 +1110,9 @@ static void DoApplyRenderingChangeToTree(nsIFrame* aFrame,
->PrincipalChildList()
.FirstChild();
for (; childFrame; childFrame = childFrame->GetNextSibling()) {
// Note: All the transform-like properties should map to the same
// layer activity index, as does the restyle count. Therefore, using
// eCSSProperty_transform should be fine.
ActiveLayerTracker::NotifyRestyle(childFrame, eCSSProperty_transform);
}
}

View File

@ -245,49 +245,39 @@ static bool MayHaveAnimationOfPropertySet(
return aTarget->MayHaveTransformAnimation();
}
bool nsLayoutUtils::HasAnimationOfPropertySet(
EffectSet* aEffectSet, const nsCSSPropertyIDSet& aPropertySet) {
if (!aEffectSet || !MayHaveAnimationOfPropertySet(aEffectSet, aPropertySet)) {
template <typename EffectSetOrFrame>
static bool HasAnimationOfPropertySetImpl(
EffectSetOrFrame* aTarget, const nsCSSPropertyIDSet& aPropertySet) {
if (!aTarget || !MayHaveAnimationOfPropertySet(aTarget, aPropertySet)) {
return false;
}
return HasMatchingAnimations(
aEffectSet, [&aPropertySet](KeyframeEffect& aEffect) {
aTarget, [&aPropertySet](KeyframeEffect& aEffect) {
return (aEffect.IsInEffect() || aEffect.IsCurrent()) &&
aEffect.HasAnimationOfPropertySet(aPropertySet);
});
}
bool nsLayoutUtils::HasAnimationOfPropertySet(
const nsIFrame* aFrame, const nsCSSPropertyIDSet& aPropertySet) {
if (!MayHaveAnimationOfPropertySet(aFrame, aPropertySet)) {
return false;
}
EffectSet* aEffectSet, const nsCSSPropertyIDSet& aPropertySet) {
return HasAnimationOfPropertySetImpl(aEffectSet, aPropertySet);
}
return HasMatchingAnimations(
aFrame, [&aPropertySet](KeyframeEffect& aEffect) {
return (aEffect.IsInEffect() || aEffect.IsCurrent()) &&
aEffect.HasAnimationOfPropertySet(aPropertySet);
});
bool nsLayoutUtils::HasAnimationOfPropertySet(
const nsIFrame* aFrame, const nsCSSPropertyIDSet& aPropertySet) {
return HasAnimationOfPropertySetImpl(aFrame, aPropertySet);
}
bool nsLayoutUtils::HasEffectiveAnimation(const nsIFrame* aFrame,
nsCSSPropertyID aProperty) {
EffectSet* effects = EffectSet::GetEffectSet(aFrame);
// This function isn't called by opacity or transform, so we don't have to
// check MayHaveAnimationOfPropertySet.
if (!effects) {
return false;
}
if (nsCSSPropertyIDSet::TransformLikeProperties().HasProperty(aProperty) &&
!effects->MayHaveTransformAnimation()) {
return false;
}
if (aProperty == eCSSProperty_opacity &&
!effects->MayHaveOpacityAnimation()) {
return false;
}
return HasMatchingAnimations(
effects, [&aProperty, &effects](KeyframeEffect& aEffect) {
return (aEffect.IsInEffect() || aEffect.IsCurrent()) &&
@ -295,6 +285,20 @@ bool nsLayoutUtils::HasEffectiveAnimation(const nsIFrame* aFrame,
});
}
bool nsLayoutUtils::HasEffectiveAnimation(
const nsIFrame* aFrame, const nsCSSPropertyIDSet& aPropertySet) {
EffectSet* effects = EffectSet::GetEffectSet(aFrame);
if (!effects || !MayHaveAnimationOfPropertySet(aFrame, aPropertySet)) {
return false;
}
return HasMatchingAnimations(effects, [&aPropertySet,
&effects](KeyframeEffect& aEffect) {
return (aEffect.IsInEffect() || aEffect.IsCurrent()) &&
aEffect.HasEffectiveAnimationOfPropertySet(aPropertySet, *effects);
});
}
/* static */ nsCSSPropertyIDSet
nsLayoutUtils::GetAnimationPropertiesForCompositor(const nsIFrame* aFrame) {
nsCSSPropertyIDSet properties;

View File

@ -21,7 +21,7 @@
#include "nsThreadUtils.h"
#include "nsIPrincipal.h"
#include "nsIWidget.h"
#include "nsCSSPropertyID.h"
#include "nsCSSPropertyIDSet.h"
#include "nsStyleCoord.h"
#include "nsStyleConsts.h"
#include "nsGkAtoms.h"
@ -2286,13 +2286,12 @@ class nsLayoutUtils {
const nsCSSPropertyIDSet& aPropertySet);
/**
* Returns true if |aEffectSet| has an animation of a property |aPropertySet|
* regardless of whether any property in the set is overridden by !important
* rule.
* Returns true if |aEffectSet| has an animation of a property in
* |aPropertySet| regardless of whether any property in the set is overridden
* by an !important rule.
*/
static bool HasAnimationOfPropertySet(mozilla::EffectSet* aEffectSet,
const nsCSSPropertyIDSet& aPropertySet);
/**
* Returns true if |aFrame| has an animation of |aProperty| which is
* not overridden by !important rules.
@ -2300,6 +2299,13 @@ class nsLayoutUtils {
static bool HasEffectiveAnimation(const nsIFrame* aFrame,
nsCSSPropertyID aProperty);
/**
* Returns true if |aFrame| has animations of properties in |aPropertySet|,
* and none of these properties is overridden by !important rules.
*/
static bool HasEffectiveAnimation(const nsIFrame* aFrame,
const nsCSSPropertyIDSet& aPropertySet);
/**
* Returns all effective animated CSS properties on |aFrame|. That means
* properties that can be animated on the compositor and are not overridden by

View File

@ -71,6 +71,12 @@ class LayerActivity {
case eCSSProperty_opacity:
return ACTIVITY_OPACITY;
case eCSSProperty_transform:
case eCSSProperty_translate:
case eCSSProperty_rotate:
case eCSSProperty_scale:
// TODO: Bug 1186329: Add motion-path into ActiveLayerTracker.
// Note: All transform-like properties map to the same activity
// index.
return ACTIVITY_TRANSFORM;
case eCSSProperty_left:
return ACTIVITY_LEFT;
@ -92,6 +98,17 @@ class LayerActivity {
}
}
static ActivityIndex GetActivityIndexForPropertySet(
const nsCSSPropertyIDSet& aPropertySet) {
if (aPropertySet.Intersect(nsCSSPropertyIDSet::TransformLikeProperties())
.Equals(aPropertySet)) {
return ACTIVITY_TRANSFORM;
}
MOZ_ASSERT(aPropertySet.Intersect(nsCSSPropertyIDSet::OpacityProperties())
.Equals(aPropertySet));
return ACTIVITY_OPACITY;
}
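Illustrative sketch only (Rust with hypothetical bitmask sets, not nsCSSPropertyIDSet): the Intersect(...).Equals(aPropertySet) pattern above is just a subset test, so the activity index is chosen by which known group the whole set falls into.
fn is_subset(set: u32, of: u32) -> bool {
    set & of == set
}
fn activity_index_for_property_set(set: u32) -> &'static str {
    // Hypothetical bit assignments for illustration only.
    const TRANSFORM_LIKE: u32 = 0b0000_1111; // transform, translate, rotate, scale
    const OPACITY: u32 = 0b0001_0000;
    if is_subset(set, TRANSFORM_LIKE) {
        "ACTIVITY_TRANSFORM"
    } else {
        // The caller guarantees the only other option is the opacity set.
        assert!(is_subset(set, OPACITY));
        "ACTIVITY_OPACITY"
    }
}
fn main() {
    println!("{}", activity_index_for_property_set(0b0000_0110)); // rotate | scale
}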
// While tracked, exactly one of mFrame or mContent is non-null, depending
// on whether this property is stored on a frame or on a content node.
// When this property is expired by the layer activity tracker, both mFrame
@ -288,7 +305,7 @@ static void IncrementScaleRestyleCountIfNeeded(nsIFrame* aFrame,
uint8_t& mutationCount = layerActivity->RestyleCountForProperty(aProperty);
IncrementMutationCount(&mutationCount);
if (aProperty == eCSSProperty_transform) {
if (nsCSSPropertyIDSet::TransformLikeProperties().HasProperty(aProperty)) {
IncrementScaleRestyleCountIfNeeded(aFrame, layerActivity);
}
}
@ -379,18 +396,6 @@ static bool IsPresContextInScriptAnimationCallback(
}
}
/* static */ bool ActiveLayerTracker::IsStyleMaybeAnimated(
nsIFrame* aFrame, nsCSSPropertyID aProperty) {
return IsStyleAnimated(nullptr, aFrame, aProperty);
}
/* static */ bool ActiveLayerTracker::IsBackgroundPositionAnimated(
nsDisplayListBuilder* aBuilder, nsIFrame* aFrame) {
return IsStyleAnimated(aBuilder, aFrame,
eCSSProperty_background_position_x) ||
IsStyleAnimated(aBuilder, aFrame, eCSSProperty_background_position_y);
}
static bool CheckScrollInducedActivity(
LayerActivity* aLayerActivity, LayerActivity::ActivityIndex aActivityIndex,
nsDisplayListBuilder* aBuilder) {
@ -412,20 +417,67 @@ static bool CheckScrollInducedActivity(
return false;
}
/* static */ bool ActiveLayerTracker::IsBackgroundPositionAnimated(
nsDisplayListBuilder* aBuilder, nsIFrame* aFrame) {
LayerActivity* layerActivity = GetLayerActivity(aFrame);
if (layerActivity) {
LayerActivity::ActivityIndex activityIndex =
LayerActivity::ActivityIndex::ACTIVITY_BACKGROUND_POSITION;
if (layerActivity->mRestyleCounts[activityIndex] >= 2) {
// If the frame needs to be repainted frequently, we probably don't get
// much from treating the property as animated, *unless* this frame's
// 'scale' (which includes the bounds changes of a rotation) is changing.
// Marking a scaling transform as animating allows us to avoid resizing
// the texture, even if we have to repaint the contents of that texture.
if (layerActivity
->mRestyleCounts[LayerActivity::ACTIVITY_TRIGGERED_REPAINT] < 2) {
return true;
}
}
if (CheckScrollInducedActivity(layerActivity, activityIndex, aBuilder)) {
return true;
}
}
return nsLayoutUtils::HasEffectiveAnimation(
aFrame, eCSSProperty_background_position_x) ||
nsLayoutUtils::HasEffectiveAnimation(
aFrame, eCSSProperty_background_position_y);
}
/* static */ bool ActiveLayerTracker::IsTransformAnimated(
nsDisplayListBuilder* aBuilder, nsIFrame* aFrame) {
return IsStyleAnimated(aBuilder, aFrame,
nsCSSPropertyIDSet::TransformLikeProperties());
}
/* static */ bool ActiveLayerTracker::IsTransformMaybeAnimated(
nsIFrame* aFrame) {
return IsStyleAnimated(nullptr, aFrame,
nsCSSPropertyIDSet::TransformLikeProperties());
}
/* static */ bool ActiveLayerTracker::IsStyleAnimated(
nsDisplayListBuilder* aBuilder, nsIFrame* aFrame,
nsCSSPropertyID aProperty) {
// TODO: Add some abuse restrictions
const nsCSSPropertyIDSet& aPropertySet) {
MOZ_ASSERT(
aPropertySet.Intersect(nsCSSPropertyIDSet::TransformLikeProperties())
.Equals(aPropertySet) ||
aPropertySet.Intersect(nsCSSPropertyIDSet::OpacityProperties())
.Equals(aPropertySet),
"Only subset of opacity or transform-like properties set calls this");
const nsCSSPropertyIDSet transformSet =
nsCSSPropertyIDSet::TransformLikeProperties();
if ((aFrame->StyleDisplay()->mWillChangeBitField &
NS_STYLE_WILL_CHANGE_TRANSFORM) &&
aProperty == eCSSProperty_transform &&
aPropertySet.Intersects(transformSet) &&
(!aBuilder ||
aBuilder->IsInWillChangeBudget(aFrame, aFrame->GetSize()))) {
return true;
}
if ((aFrame->StyleDisplay()->mWillChangeBitField &
NS_STYLE_WILL_CHANGE_OPACITY) &&
aProperty == eCSSProperty_opacity &&
aPropertySet.Intersects(nsCSSPropertyIDSet::OpacityProperties()) &&
(!aBuilder ||
aBuilder->IsInWillChangeBudget(aFrame, aFrame->GetSize()))) {
return true;
@ -434,7 +486,7 @@ static bool CheckScrollInducedActivity(
LayerActivity* layerActivity = GetLayerActivity(aFrame);
if (layerActivity) {
LayerActivity::ActivityIndex activityIndex =
LayerActivity::GetActivityIndexForProperty(aProperty);
LayerActivity::GetActivityIndexForPropertySet(aPropertySet);
if (layerActivity->mRestyleCounts[activityIndex] >= 2) {
// If the frame needs to be repainted frequently, we probably don't get
// much from treating the property as animated, *unless* this frame's
@ -444,7 +496,7 @@ static bool CheckScrollInducedActivity(
if (layerActivity
->mRestyleCounts[LayerActivity::ACTIVITY_TRIGGERED_REPAINT] <
2 ||
(aProperty == eCSSProperty_transform &&
(aPropertySet.Intersects(transformSet) &&
IsScaleSubjectToAnimation(aFrame))) {
return true;
}
@ -453,11 +505,11 @@ static bool CheckScrollInducedActivity(
return true;
}
}
if (aProperty == eCSSProperty_transform &&
if (aPropertySet.Intersects(transformSet) &&
aFrame->Combines3DTransformWithAncestors()) {
return IsStyleAnimated(aBuilder, aFrame->GetParent(), aProperty);
return IsStyleAnimated(aBuilder, aFrame->GetParent(), aPropertySet);
}
return nsLayoutUtils::HasEffectiveAnimation(aFrame, aProperty);
return nsLayoutUtils::HasEffectiveAnimation(aFrame, aPropertySet);
}
/* static */ bool ActiveLayerTracker::IsOffsetStyleAnimated(nsIFrame* aFrame) {

View File

@ -11,6 +11,7 @@
class nsIFrame;
class nsIContent;
class nsCSSPropertyIDSet;
class nsDisplayListBuilder;
class nsDOMCSSDeclaration;
@ -28,7 +29,8 @@ class ActiveLayerTracker {
/*
* We track style changes to selected styles:
* eCSSProperty_transform
* eCSSProperty_transform, eCSSProperty_translate,
* eCSSProperty_rotate, eCSSProperty_scale
* eCSSProperty_opacity
* eCSSProperty_left, eCSSProperty_top,
* eCSSProperty_right, eCSSProperty_bottom
@ -84,16 +86,11 @@ class ActiveLayerTracker {
*/
static void NotifyNeedsRepaint(nsIFrame* aFrame);
/**
* Return true if aFrame's aProperty style should be considered as being
* animated for pre-rendering.
*/
static bool IsStyleMaybeAnimated(nsIFrame* aFrame, nsCSSPropertyID aProperty);
/**
* Return true if aFrame's aProperty style should be considered as being
* animated for constructing active layers.
* Return true if aFrame's property style in |aPropertySet| should be
* considered as being animated for constructing active layers.
*/
static bool IsStyleAnimated(nsDisplayListBuilder* aBuilder, nsIFrame* aFrame,
nsCSSPropertyID aProperty);
const nsCSSPropertyIDSet& aPropertySet);
/**
* Return true if any of aFrame's offset property styles should be considered
* as being animated for constructing active layers.
@ -105,6 +102,17 @@ class ActiveLayerTracker {
*/
static bool IsBackgroundPositionAnimated(nsDisplayListBuilder* aBuilder,
nsIFrame* aFrame);
/**
* Return true if aFrame's transform-like property,
* i.e. transform/translate/rotate/scale, is animated.
*/
static bool IsTransformAnimated(nsDisplayListBuilder* aBuilder,
nsIFrame* aFrame);
/**
* Return true if aFrame's transform style should be considered as being
* animated for pre-rendering.
*/
static bool IsTransformMaybeAnimated(nsIFrame* aFrame);
/**
* Return true if aFrame either has an animated scale now, or is likely to
* have one in the future because it has a CSS animation or transition

View File

@ -6162,8 +6162,8 @@ static bool ChooseScaleAndSetTransform(
aIncomingScale);
if (aTransform) {
aOutgoingScale.mInTransformedSubtree = true;
if (ActiveLayerTracker::IsStyleAnimated(
aDisplayListBuilder, aContainerFrame, eCSSProperty_transform)) {
if (ActiveLayerTracker::IsTransformAnimated(aDisplayListBuilder,
aContainerFrame)) {
aOutgoingScale.mInActiveTransformedSubtree = true;
}
}

View File

@ -5829,8 +5829,8 @@ static bool IsItemTooSmallForActiveLayer(nsIFrame* aFrame) {
bool aEnforceMinimumSize) {
if (EffectCompositor::HasAnimationsForCompositor(aFrame,
eCSSProperty_opacity) ||
(ActiveLayerTracker::IsStyleAnimated(aBuilder, aFrame,
eCSSProperty_opacity) &&
(ActiveLayerTracker::IsStyleAnimated(
aBuilder, aFrame, nsCSSPropertyIDSet::OpacityProperties()) &&
!(aEnforceMinimumSize && IsItemTooSmallForActiveLayer(aFrame)))) {
return true;
}
@ -7602,8 +7602,8 @@ Matrix4x4 nsDisplayTransform::GetResultingTransformMatrixInternal(
}
bool nsDisplayOpacity::CanUseAsyncAnimations(nsDisplayListBuilder* aBuilder) {
if (ActiveLayerTracker::IsStyleAnimated(aBuilder, mFrame,
eCSSProperty_opacity)) {
if (ActiveLayerTracker::IsStyleAnimated(
aBuilder, mFrame, nsCSSPropertyIDSet::OpacityProperties())) {
return true;
}
@ -7633,8 +7633,7 @@ bool nsDisplayBackgroundColor::CanUseAsyncAnimations(
// have a compositor-animated transform, can be prerendered. An element
// might have only just had its transform animated in which case
// the ActiveLayerManager may not have been notified yet.
if (!ActiveLayerTracker::IsStyleMaybeAnimated(aFrame,
eCSSProperty_transform) &&
if (!ActiveLayerTracker::IsTransformMaybeAnimated(aFrame) &&
!EffectCompositor::HasAnimationsForCompositor(aFrame,
eCSSProperty_transform)) {
EffectCompositor::SetPerformanceWarning(
@ -7909,8 +7908,7 @@ bool nsDisplayTransform::CreateWebRenderCommands(
}
// Determine if we're possibly animated (= would need an active layer in FLB).
bool animated =
ActiveLayerTracker::IsStyleMaybeAnimated(Frame(), eCSSProperty_transform);
bool animated = ActiveLayerTracker::IsTransformMaybeAnimated(Frame());
wr::StackingContextParams params;
params.mBoundTransform = &newTransformMatrix;
@ -8022,14 +8020,13 @@ bool nsDisplayTransform::MayBeAnimated(nsDisplayListBuilder* aBuilder,
// If EffectCompositor::HasAnimationsForCompositor() is true then we can
// completely bypass the main thread for this animation, so it is always
// worthwhile.
// For ActiveLayerTracker::IsStyleAnimated() cases the main thread is
// For ActiveLayerTracker::IsTransformAnimated() cases the main thread is
// already involved so there is less to be gained.
// Therefore we check that the *post-transform* bounds of this item are
// big enough to justify an active layer.
if (EffectCompositor::HasAnimationsForCompositor(mFrame,
eCSSProperty_transform) ||
(ActiveLayerTracker::IsStyleAnimated(aBuilder, mFrame,
eCSSProperty_transform) &&
(ActiveLayerTracker::IsTransformAnimated(aBuilder, mFrame) &&
!(aEnforceMinimumSize && IsItemTooSmallForActiveLayer(mFrame)))) {
return true;
}

View File

@ -117,16 +117,13 @@ nscolor AnimationValue::GetColor(nscolor aForegroundColor) const {
already_AddRefed<const nsCSSValueSharedList> AnimationValue::GetTransformList()
const {
MOZ_ASSERT(mServo);
RefPtr<nsCSSValueSharedList> transform;
Servo_AnimationValue_GetTransform(mServo, &transform);
return transform.forget();
}
Size AnimationValue::GetScaleValue(const nsIFrame* aFrame) const {
MOZ_ASSERT(mServo);
RefPtr<nsCSSValueSharedList> list;
Servo_AnimationValue_GetTransform(mServo, &list);
RefPtr<const nsCSSValueSharedList> list = GetTransformList();
return nsStyleTransformMatrix::GetScaleValue(list, aFrame);
}

View File

@ -158,14 +158,17 @@ nsresult nsDOMCSSAttributeDeclaration::SetSMILValue(
nsresult nsDOMCSSAttributeDeclaration::SetPropertyValue(
const nsCSSPropertyID aPropID, const nsAString& aValue,
nsIPrincipal* aSubjectPrincipal) {
// Scripted modifications to style.opacity or style.transform
// Scripted modifications to style.opacity or style.transform (or other
// transform-like properties, e.g. style.translate, style.rotate, style.scale)
// could immediately force us into the animated state if heuristics suggest
// this is scripted animation.
// FIXME: This is missing the margin shorthand and the logical versions of
// the margin properties, see bug 1266287.
if (aPropID == eCSSProperty_opacity || aPropID == eCSSProperty_transform ||
aPropID == eCSSProperty_left || aPropID == eCSSProperty_top ||
aPropID == eCSSProperty_right || aPropID == eCSSProperty_bottom ||
aPropID == eCSSProperty_translate || aPropID == eCSSProperty_rotate ||
aPropID == eCSSProperty_scale || aPropID == eCSSProperty_left ||
aPropID == eCSSProperty_top || aPropID == eCSSProperty_right ||
aPropID == eCSSProperty_bottom ||
aPropID == eCSSProperty_background_position_x ||
aPropID == eCSSProperty_background_position_y ||
aPropID == eCSSProperty_background_position) {

View File

@ -53,6 +53,9 @@ nsresult nsDOMCSSDeclaration::SetPropertyValue(
case eCSSProperty_background_position_x:
case eCSSProperty_background_position_y:
case eCSSProperty_transform:
case eCSSProperty_translate:
case eCSSProperty_rotate:
case eCSSProperty_scale:
case eCSSProperty_top:
case eCSSProperty_left:
case eCSSProperty_bottom:

View File

@ -85,7 +85,9 @@ impl Device {
assert!(!pres_context.is_null());
Device {
pres_context,
default_values: ComputedValues::default_values(unsafe { &*(*pres_context).mDocument.mRawPtr }),
default_values: ComputedValues::default_values(unsafe {
&*(*pres_context).mDocument.mRawPtr
}),
// FIXME(bz): Seems dubious?
root_font_size: AtomicIsize::new(FontSize::medium().size().0 as isize),
body_text_color: AtomicUsize::new(unsafe { &*pres_context }.mDefaultColor as usize),

View File

@ -8,7 +8,9 @@ use crate::gecko::values::{convert_nscolor_to_rgba, convert_rgba_to_nscolor};
use crate::gecko_bindings::structs::StyleComplexColor;
use crate::gecko_bindings::structs::StyleComplexColor_Tag as Tag;
use crate::values::computed::{Color as ComputedColor, ColorOrAuto, RGBAColor as ComputedRGBA};
use crate::values::generics::color::{Color as GenericColor, ColorOrAuto as GenericColorOrAuto, ComplexColorRatios};
use crate::values::generics::color::{
Color as GenericColor, ColorOrAuto as GenericColorOrAuto, ComplexColorRatios,
};
impl StyleComplexColor {
/// Create a `StyleComplexColor` value that represents `currentColor`.

View File

@ -9,7 +9,9 @@ use crate::values::animated::ToAnimatedValue;
use crate::values::computed::NonNegativeNumber;
use crate::values::distance::{ComputeSquaredDistance, SquaredDistance};
use crate::values::generics::length as generics;
use crate::values::generics::length::{MaxSize as GenericMaxSize, Size as GenericSize, GenericLengthOrNumber};
use crate::values::generics::length::{
GenericLengthOrNumber, MaxSize as GenericMaxSize, Size as GenericSize,
};
use crate::values::generics::transform::IsZeroLength;
use crate::values::generics::NonNegative;
use crate::values::specified::length::ViewportPercentageLength;

View File

@ -21,7 +21,8 @@ pub type TransformOperation =
pub type Transform = generic::Transform<TransformOperation>;
/// The computed value of a CSS `<transform-origin>`
pub type TransformOrigin = generic::GenericTransformOrigin<LengthPercentage, LengthPercentage, Length>;
pub type TransformOrigin =
generic::GenericTransformOrigin<LengthPercentage, LengthPercentage, Length>;
/// A vector to represent the direction vector (rotate axis) for Rotate3D.
pub type DirectionVector = Vector3D<CSSFloat>;

View File

@ -4,7 +4,7 @@
//! Generic types for CSS values related to backgrounds.
use crate::values::generics::length::{LengthPercentageOrAuto, GenericLengthPercentageOrAuto};
use crate::values::generics::length::{GenericLengthPercentageOrAuto, LengthPercentageOrAuto};
use std::fmt::{self, Write};
use style_traits::{CssWriter, ToCss};

View File

@ -127,8 +127,8 @@ impl Parse for CounterStyleOrNone {
let symbols = Symbols::parse(context, input)?;
// There must be at least two symbols for alphabetic or
// numeric system.
if (symbols_type == SymbolsType::Alphabetic ||
symbols_type == SymbolsType::Numeric) && symbols.0.len() < 2
if (symbols_type == SymbolsType::Alphabetic || symbols_type == SymbolsType::Numeric) &&
symbols.0.len() < 2
{
return Err(input.new_custom_error(StyleParseErrorKind::UnspecifiedError));
}
@ -177,7 +177,7 @@ impl SpecifiedValueInfo for CounterStyleOrNone {
#[repr(transparent)]
pub struct NonNegative<T>(pub T);
impl <T: Add<Output = T>> Add<NonNegative<T>> for NonNegative<T> {
impl<T: Add<Output = T>> Add<NonNegative<T>> for NonNegative<T> {
type Output = Self;
fn add(self, other: Self) -> Self {
@ -185,7 +185,7 @@ impl <T: Add<Output = T>> Add<NonNegative<T>> for NonNegative<T> {
}
}
impl <T: Zero> Zero for NonNegative<T> {
impl<T: Zero> Zero for NonNegative<T> {
fn is_zero(&self) -> bool {
self.0.is_zero()
}

View File

@ -32,7 +32,10 @@ pub use self::GenericPosition as Position;
impl<H, V> Position<H, V> {
/// Returns a new position.
pub fn new(horizontal: H, vertical: V) -> Self {
Self { horizontal, vertical }
Self {
horizontal,
vertical,
}
}
}

View File

@ -98,7 +98,11 @@ pub use self::GenericTransformOrigin as TransformOrigin;
impl<H, V, D> TransformOrigin<H, V, D> {
/// Returns a new transform origin.
pub fn new(horizontal: H, vertical: V, depth: D) -> Self {
Self { horizontal, vertical, depth }
Self {
horizontal,
vertical,
depth,
}
}
}

View File

@ -418,17 +418,7 @@ pub enum ScrollSnapAlignKeyword {
/// https://drafts.csswg.org/css-scroll-snap-1/#scroll-snap-align
#[allow(missing_docs)]
#[cfg_attr(feature = "servo", derive(Deserialize, Serialize))]
#[derive(
Clone,
Copy,
Debug,
Eq,
MallocSizeOf,
PartialEq,
SpecifiedValueInfo,
ToComputedValue,
)]
#[derive(Clone, Copy, Debug, Eq, MallocSizeOf, PartialEq, SpecifiedValueInfo, ToComputedValue)]
#[repr(C)]
pub struct ScrollSnapAlign {
block: ScrollSnapAlignKeyword,

View File

@ -17,7 +17,7 @@ use crate::values::generics::font::{KeywordSize, VariationValue};
use crate::values::generics::NonNegative;
use crate::values::specified::length::{FontBaseSize, AU_PER_PT, AU_PER_PX};
use crate::values::specified::{AllowQuirks, Angle, Integer, LengthPercentage};
use crate::values::specified::{NoCalcLength, Number, NonNegativeNumber, Percentage};
use crate::values::specified::{NoCalcLength, NonNegativeNumber, Number, Percentage};
use crate::values::CustomIdent;
use crate::Atom;
use app_units::Au;

View File

@ -11,11 +11,13 @@ use crate::font_metrics::FontMetricsQueryResult;
use crate::parser::{Parse, ParserContext};
use crate::values::computed::{self, CSSPixelLength, Context};
use crate::values::generics::length as generics;
use crate::values::generics::length::{MaxSize as GenericMaxSize, Size as GenericSize, GenericLengthOrNumber};
use crate::values::generics::length::{
GenericLengthOrNumber, MaxSize as GenericMaxSize, Size as GenericSize,
};
use crate::values::generics::transform::IsZeroLength;
use crate::values::generics::NonNegative;
use crate::values::specified::NonNegativeNumber;
use crate::values::specified::calc::CalcNode;
use crate::values::specified::NonNegativeNumber;
use crate::values::{Auto, CSSFloat, Either, Normal};
use app_units::Au;
use cssparser::{Parser, Token};

View File

@ -19,10 +19,10 @@ use crate::values::serialize_atom_identifier;
use crate::values::specified::calc::CalcNode;
use crate::{Atom, Namespace, Prefix};
use cssparser::{Parser, Token};
use num_traits::{Zero, One};
use num_traits::{One, Zero};
use std::f32;
use std::ops::Add;
use std::fmt::{self, Write};
use std::ops::Add;
use style_traits::values::specified::AllowedNumericType;
use style_traits::{CssWriter, ParseError, SpecifiedValueInfo, StyleParseErrorKind, ToCss};

View File

@ -64,7 +64,7 @@ impl Parse for SVGStrokeDashArray {
NonNegativeSvgLengthPercentageOrNumber::parse(context, i)
})
}) {
return Ok(generic::SVGStrokeDashArray::Values(values))
return Ok(generic::SVGStrokeDashArray::Values(values));
}
try_match_ident_ignore_ascii_case! { input,

View File

@ -14,9 +14,9 @@ use crate::values::computed::{Context, ToComputedValue};
use crate::values::generics::text::InitialLetter as GenericInitialLetter;
use crate::values::generics::text::LineHeight as GenericLineHeight;
use crate::values::generics::text::Spacing;
use crate::values::specified::length::NonNegativeLengthPercentage;
use crate::values::specified::length::{FontRelativeLength, Length};
use crate::values::specified::length::{LengthPercentage, NoCalcLength};
use crate::values::specified::length::{NonNegativeLengthPercentage};
use crate::values::specified::{AllowQuirks, Integer, NonNegativeNumber, Number};
use cssparser::{Parser, Token};
use selectors::parser::SelectorParseErrorKind;

View File

@ -231,7 +231,7 @@ impl Parse for Transform {
}
/// The specified value of a component of a CSS `<transform-origin>`.
#[derive(Clone, Debug, MallocSizeOf, PartialEq, Parse, SpecifiedValueInfo, ToCss)]
#[derive(Clone, Debug, MallocSizeOf, Parse, PartialEq, SpecifiedValueInfo, ToCss)]
pub enum OriginComponent<S> {
/// `center`
Center,

View File

@ -70,12 +70,14 @@ pub fn derive(mut input: DeriveInput) -> TokenStream {
{
let mut where_clause = input.generics.where_clause.take();
for param in input.generics.type_params() {
cg::add_predicate(&mut where_clause, parse_quote!(#param: crate::parser::Parse));
cg::add_predicate(
&mut where_clause,
parse_quote!(#param: crate::parser::Parse),
);
}
input.generics.where_clause = where_clause;
}
let name = &input.ident;
let s = Structure::new(&input);
@ -140,13 +142,8 @@ pub fn derive(mut input: DeriveInput) -> TokenStream {
let mut parse_non_keywords = quote! {};
for (i, (variant, css_attrs, parse_attrs)) in non_keywords.iter().enumerate() {
let skip_try = !has_keywords && i == non_keywords.len() - 1;
let parse_variant = parse_non_keyword_variant(
name,
variant,
css_attrs,
parse_attrs,
skip_try,
);
let parse_variant =
parse_non_keyword_variant(name, variant, css_attrs, parse_attrs, skip_try);
parse_non_keywords.extend(parse_variant);
}

View File

@ -856,22 +856,42 @@ pub extern "C" fn Servo_AnimationValue_Color(
}
#[no_mangle]
pub extern "C" fn Servo_AnimationValue_GetTransform(
pub unsafe extern "C" fn Servo_AnimationValue_GetTransform(
value: RawServoAnimationValueBorrowed,
list: *mut structs::RefPtr<nsCSSValueSharedList>,
) {
let list = &mut *list;
let value = AnimationValue::as_arc(&value);
if let AnimationValue::Transform(ref servo_list) = **value {
let list = unsafe { &mut *list };
if servo_list.0.is_empty() {
unsafe {
match **value {
AnimationValue::Transform(ref servo_list) => {
if servo_list.0.is_empty() {
list.set_move(RefPtr::from_addrefed(Gecko_NewNoneTransform()));
} else {
gecko_properties::convert_transform(&servo_list.0, list);
}
},
AnimationValue::Translate(ref v) => {
if let Some(v) = v.to_transform_operation() {
gecko_properties::convert_transform(&[v], list);
} else {
list.set_move(RefPtr::from_addrefed(Gecko_NewNoneTransform()));
}
} else {
gecko_properties::convert_transform(&servo_list.0, list);
}
} else {
panic!("The AnimationValue should be transform");
},
AnimationValue::Rotate(ref v) => {
if let Some(v) = v.to_transform_operation() {
gecko_properties::convert_transform(&[v], list);
} else {
list.set_move(RefPtr::from_addrefed(Gecko_NewNoneTransform()));
}
},
AnimationValue::Scale(ref v) => {
if let Some(v) = v.to_transform_operation() {
gecko_properties::convert_transform(&[v], list);
} else {
list.set_move(RefPtr::from_addrefed(Gecko_NewNoneTransform()));
}
},
_ => unreachable!("Unsupported transform-like animation value"),
}
}

View File

@ -22,6 +22,8 @@ ADD topsrcdir/taskcluster/scripts/run-task /usr/local/bin/run-task
ADD build-image.sh /usr/local/bin/build-image.sh
ADD download-and-compress /usr/local/bin/download-and-compress
ADD setup.sh /setup/setup.sh
ADD requirements/py2.txt /setup/requirements-py2.txt
ADD requirements/py3.txt /setup/requirements-py3.txt
RUN bash /setup/setup.sh
# Setup a workspace that won't use AUFS.

View File

@ -43,9 +43,13 @@ run-task \
-- \
sh -x -c "$LOAD_COMMAND \
/builds/worker/checkouts/gecko/mach taskcluster-build-image \
-t \"$IMAGE_NAME:$HASH\" \
-t \"${IMAGE_NAME}:${HASH}-pre\" \
\"$IMAGE_NAME\""
# Squash the image
export DOCKER_HOST=unix:/$DOCKER_SOCKET
/usr/local/bin/docker-squash -v -t "${IMAGE_NAME}:${HASH}" "${IMAGE_NAME}:${HASH}-pre"
# Create artifact folder (note that this must occur after run-task)
mkdir -p /builds/worker/workspace/artifacts

View File

@ -0,0 +1,2 @@
# For compressing docker images
zstandard

View File

@ -0,0 +1,67 @@
#
# This file is autogenerated by pip-compile
# To update, run:
#
# pip-compile --generate-hashes --output-file requirements/py2.txt requirements/py2.in
#
cffi==1.12.1 \
--hash=sha256:0b5f895714a7a9905148fc51978c62e8a6cbcace30904d39dcd0d9e2265bb2f6 \
--hash=sha256:27cdc7ba35ee6aa443271d11583b50815c4bb52be89a909d0028e86c21961709 \
--hash=sha256:2d4a38049ea93d5ce3c7659210393524c1efc3efafa151bd85d196fa98fce50a \
--hash=sha256:3262573d0d60fc6b9d0e0e6e666db0e5045cbe8a531779aa0deb3b425ec5a282 \
--hash=sha256:358e96cfffc185ab8f6e7e425c7bb028931ed08d65402fbcf3f4e1bff6e66556 \
--hash=sha256:37c7db824b5687fbd7ea5519acfd054c905951acc53503547c86be3db0580134 \
--hash=sha256:39b9554dfe60f878e0c6ff8a460708db6e1b1c9cc6da2c74df2955adf83e355d \
--hash=sha256:42b96a77acf8b2d06821600fa87c208046decc13bd22a4a0e65c5c973443e0da \
--hash=sha256:5b37dde5035d3c219324cac0e69d96495970977f310b306fa2df5910e1f329a1 \
--hash=sha256:5d35819f5566d0dd254f273d60cf4a2dcdd3ae3003dfd412d40b3fe8ffd87509 \
--hash=sha256:5df73aa465e53549bd03c819c1bc69fb85529a5e1a693b7b6cb64408dd3970d1 \
--hash=sha256:7075b361f7a4d0d4165439992d0b8a3cdfad1f302bf246ed9308a2e33b046bd3 \
--hash=sha256:7678b5a667b0381c173abe530d7bdb0e6e3b98e062490618f04b80ca62686d96 \
--hash=sha256:7dfd996192ff8a535458c17f22ff5eb78b83504c34d10eefac0c77b1322609e2 \
--hash=sha256:8a3be5d31d02c60f84c4fd4c98c5e3a97b49f32e16861367f67c49425f955b28 \
--hash=sha256:9812e53369c469506b123aee9dcb56d50c82fad60c5df87feb5ff59af5b5f55c \
--hash=sha256:9b6f7ba4e78c52c1a291d0c0c0bd745d19adde1a9e1c03cb899f0c6efd6f8033 \
--hash=sha256:a85bc1d7c3bba89b3d8c892bc0458de504f8b3bcca18892e6ed15b5f7a52ad9d \
--hash=sha256:aa6b9c843ad645ebb12616de848cc4e25a40f633ccc293c3c9fe34107c02c2ea \
--hash=sha256:bae1aa56ee00746798beafe486daa7cfb586cd395c6ce822ba3068e48d761bc0 \
--hash=sha256:bae96e26510e4825d5910a196bf6b5a11a18b87d9278db6d08413be8ea799469 \
--hash=sha256:bd78df3b594013b227bf31d0301566dc50ba6f40df38a70ded731d5a8f2cb071 \
--hash=sha256:c2711197154f46d06f73542c539a0ff5411f1951fab391e0a4ac8359badef719 \
--hash=sha256:d998c20e3deed234fca993fd6c8314cb7cbfda05fd170f1bd75bb5d7421c3c5a \
--hash=sha256:df4f840d77d9e37136f8e6b432fecc9d6b8730f18f896e90628712c793466ce6 \
--hash=sha256:f5653c2581acb038319e6705d4e3593677676df14b112f13e0b5b44b6a18df1a \
--hash=sha256:f7c7aa485a2e2250d455148470ffd0195eecc3d845122635202d7467d6f7b4cf \
--hash=sha256:f9e2c66a6493147de835f207f198540a56b26745ce4f272fbc7c2f2cfebeb729 \
# via zstandard
pycparser==2.19 \
--hash=sha256:a988718abfad80b6b157acce7bf130a30876d27603738ac39f140993246b25b3 \
# via cffi
zstandard==0.10.2 \
--hash=sha256:08114ac056944e7f70c0faf99d0afbce08b078eacf8ee6698985654c7e725234 \
--hash=sha256:087276799ddf3200b4724e3d6f57b11ba975d9243b4af9e95721397d795a2497 \
--hash=sha256:0c21feac9f7c850a457b1c707c3cc4f3b8f475a3c9120f8cec82ebc3b215b80a \
--hash=sha256:0fe6403a01e996a7247239691101148dc4071ccf7fe12b680d7b6c91a04aefbb \
--hash=sha256:1383412acd5356ff543c434723f2e7794c77e1ed4efc1062464cc2112c09af50 \
--hash=sha256:2acd18eeac4fcecef8c1b95d4ffaa606222aa1ba0d4372e829dc516b0504e6ef \
--hash=sha256:302bd7b3bc7281015cd6f975207755c534551d0a32c79147518f2de0459dbef4 \
--hash=sha256:390acfced0106fb12247e12c2aa399836e6686f5ba9daec332957ff830f215cd \
--hash=sha256:43ec51075547d498ec6e7952e459c3817e610d6e4ca68f4fa43a16ccea01d496 \
--hash=sha256:53f89a65d52d6fb56b2c5dd0445f30ca25852f344ba20de325ce6767dd842fca \
--hash=sha256:5f4f650b83b8085862de9e555d87f6053ca577b4070f4c6610a870116c4dd1f4 \
--hash=sha256:72ef2361d90a717457376351acb5b1b0c189a09dbd95adcb51907a96b79a6add \
--hash=sha256:7ef5c7ede8e8cda2a37c0ecab456f4cfae2c42049f51b24edb5303dbfe318ea6 \
--hash=sha256:86c9dee0fe6d4ea5bf394767929fdf5f924d161d9a6d23adcd58a690c5e160b0 \
--hash=sha256:8b587c9a17f4b050274d9b7f9284d5fae0a8d6a8021f88f779345593326bc33d \
--hash=sha256:91025801859a60b7761dea6a8b645f25be6d3639ef828423f094d90b3f60850e \
--hash=sha256:9d2940e2801cc768d2cb71e71dca3b025ca3737e9d1d0fad0c95b2e7db0c947a \
--hash=sha256:aa520b90eede823632013a319e91652d8226a6309a104cffdc7e00d5a2b5e66b \
--hash=sha256:b10fba39049595827f228e77e7b5070cb39c46466bf8fef51da73220a20cc717 \
--hash=sha256:c794b5c21485fb3232f5693995ba1a497267b1aecb70b218107cf131f8dc1d3d \
--hash=sha256:d05516bc197c5b7b2aa2f834ea7c5ee9fd9aa3034f4193cc05d899b18251aa9c \
--hash=sha256:d085c2c676f03357e5d6b11dbbf4e8c1b0d20b1066ac87e6cccc45d4b6c19675 \
--hash=sha256:dd40e26aaee67b9078618b0fce3d5f209e328852f2c72c6772cf6352f57d2ed1 \
--hash=sha256:e7b84c10ed30c1c997d81ef271945372fba9e18ac58d77a17d43fd9c42392ed4 \
--hash=sha256:e982d8af9618d45b25456f1f80e6d628295772d74d755f9a46b90711b7a56067 \
--hash=sha256:ef24c8ec97f93b2bdf1080553cdf38ea9ab195846b679cdcfe683c945ed2f1ee \
--hash=sha256:f46c5021c3663f82c2ff994295a8574638d56a831ca2a26d736d47fbcf4f9187

View File

@ -0,0 +1,8 @@
# The docker module removed support for docker engines < 1.21 starting with
# docker 3.0. Once we upgrade the docker running on our workers, we can remove
# this restriction here.
docker<3.0
docker-squash
# For compressing docker images
zstandard

View File

@ -0,0 +1,104 @@
#
# This file is autogenerated by pip-compile
# To update, run:
#
# pip-compile --generate-hashes --output-file requirements/py3.txt requirements/py3.in
#
certifi==2018.11.29 \
--hash=sha256:47f9c83ef4c0c621eaef743f133f09fa8a74a9b75f037e8624f83bd1b6626cb7 \
--hash=sha256:993f830721089fef441cdfeb4b2c8c9df86f0c63239f06bd025a76a7daddb033 \
# via requests
cffi==1.12.1 \
--hash=sha256:0b5f895714a7a9905148fc51978c62e8a6cbcace30904d39dcd0d9e2265bb2f6 \
--hash=sha256:27cdc7ba35ee6aa443271d11583b50815c4bb52be89a909d0028e86c21961709 \
--hash=sha256:2d4a38049ea93d5ce3c7659210393524c1efc3efafa151bd85d196fa98fce50a \
--hash=sha256:3262573d0d60fc6b9d0e0e6e666db0e5045cbe8a531779aa0deb3b425ec5a282 \
--hash=sha256:358e96cfffc185ab8f6e7e425c7bb028931ed08d65402fbcf3f4e1bff6e66556 \
--hash=sha256:37c7db824b5687fbd7ea5519acfd054c905951acc53503547c86be3db0580134 \
--hash=sha256:39b9554dfe60f878e0c6ff8a460708db6e1b1c9cc6da2c74df2955adf83e355d \
--hash=sha256:42b96a77acf8b2d06821600fa87c208046decc13bd22a4a0e65c5c973443e0da \
--hash=sha256:5b37dde5035d3c219324cac0e69d96495970977f310b306fa2df5910e1f329a1 \
--hash=sha256:5d35819f5566d0dd254f273d60cf4a2dcdd3ae3003dfd412d40b3fe8ffd87509 \
--hash=sha256:5df73aa465e53549bd03c819c1bc69fb85529a5e1a693b7b6cb64408dd3970d1 \
--hash=sha256:7075b361f7a4d0d4165439992d0b8a3cdfad1f302bf246ed9308a2e33b046bd3 \
--hash=sha256:7678b5a667b0381c173abe530d7bdb0e6e3b98e062490618f04b80ca62686d96 \
--hash=sha256:7dfd996192ff8a535458c17f22ff5eb78b83504c34d10eefac0c77b1322609e2 \
--hash=sha256:8a3be5d31d02c60f84c4fd4c98c5e3a97b49f32e16861367f67c49425f955b28 \
--hash=sha256:9812e53369c469506b123aee9dcb56d50c82fad60c5df87feb5ff59af5b5f55c \
--hash=sha256:9b6f7ba4e78c52c1a291d0c0c0bd745d19adde1a9e1c03cb899f0c6efd6f8033 \
--hash=sha256:a85bc1d7c3bba89b3d8c892bc0458de504f8b3bcca18892e6ed15b5f7a52ad9d \
--hash=sha256:aa6b9c843ad645ebb12616de848cc4e25a40f633ccc293c3c9fe34107c02c2ea \
--hash=sha256:bae1aa56ee00746798beafe486daa7cfb586cd395c6ce822ba3068e48d761bc0 \
--hash=sha256:bae96e26510e4825d5910a196bf6b5a11a18b87d9278db6d08413be8ea799469 \
--hash=sha256:bd78df3b594013b227bf31d0301566dc50ba6f40df38a70ded731d5a8f2cb071 \
--hash=sha256:c2711197154f46d06f73542c539a0ff5411f1951fab391e0a4ac8359badef719 \
--hash=sha256:d998c20e3deed234fca993fd6c8314cb7cbfda05fd170f1bd75bb5d7421c3c5a \
--hash=sha256:df4f840d77d9e37136f8e6b432fecc9d6b8730f18f896e90628712c793466ce6 \
--hash=sha256:f5653c2581acb038319e6705d4e3593677676df14b112f13e0b5b44b6a18df1a \
--hash=sha256:f7c7aa485a2e2250d455148470ffd0195eecc3d845122635202d7467d6f7b4cf \
--hash=sha256:f9e2c66a6493147de835f207f198540a56b26745ce4f272fbc7c2f2cfebeb729 \
# via zstandard
chardet==3.0.4 \
--hash=sha256:84ab92ed1c4d4f16916e05906b6b75a6c0fb5db821cc65e70cbd64a3e2a5eaae \
--hash=sha256:fc323ffcaeaed0e0a02bf4d117757b98aed530d9ed4531e3e15460124c106691 \
# via requests
docker-pycreds==0.4.0 \
--hash=sha256:6ce3270bcaf404cc4c3e27e4b6c70d3521deae82fb508767870fdbf772d584d4 \
--hash=sha256:7266112468627868005106ec19cd0d722702d2b7d5912a28e19b826c3d37af49 \
# via docker
docker-squash==1.0.7 \
--hash=sha256:95ca24fbeffa915c3d467b7ad538c6437a02f68aaa4e31e16c451c47a30a2169
docker==2.7.0 \
--hash=sha256:144248308e8ea31c4863c6d74e1b55daf97cc190b61d0fe7b7313ab920d6a76c \
--hash=sha256:c1d4e37b1ea03b2b6efdd0379640f6ea372fefe56efa65d4d17c34c6b9d54558
idna==2.8 \
--hash=sha256:c357b3f628cf53ae2c4c05627ecc484553142ca23264e593d327bcde5e9c3407 \
--hash=sha256:ea8b7f6188e6fa117537c3df7da9fc686d485087abf6ac197f9c46432f7e4a3c \
# via requests
pycparser==2.19 \
--hash=sha256:a988718abfad80b6b157acce7bf130a30876d27603738ac39f140993246b25b3 \
# via cffi
requests==2.21.0 \
--hash=sha256:502a824f31acdacb3a35b6690b5fbf0bc41d63a24a45c4004352b0242707598e \
--hash=sha256:7bf2a778576d825600030a110f3c0e3e8edc51dfaafe1c146e39a2027784957b \
# via docker
six==1.12.0 \
--hash=sha256:3350809f0555b11f552448330d0b52d5f24c91a322ea4a15ef22629740f3761c \
--hash=sha256:d16a0141ec1a18405cd4ce8b4613101da75da0e9a7aec5bdd4fa804d0e0eba73 \
# via docker, docker-pycreds, docker-squash, websocket-client
urllib3==1.24.1 \
--hash=sha256:61bf29cada3fc2fbefad4fdf059ea4bd1b4a86d2b6d15e1c7c0b582b9752fe39 \
--hash=sha256:de9529817c93f27c8ccbfead6985011db27bd0ddfcdb2d86f3f663385c6a9c22 \
# via requests
websocket-client==0.54.0 \
--hash=sha256:8c8bf2d4f800c3ed952df206b18c28f7070d9e3dcbd6ca6291127574f57ee786 \
--hash=sha256:e51562c91ddb8148e791f0155fdb01325d99bb52c4cdbb291aee7a3563fd0849 \
# via docker
zstandard==0.10.2 \
--hash=sha256:08114ac056944e7f70c0faf99d0afbce08b078eacf8ee6698985654c7e725234 \
--hash=sha256:087276799ddf3200b4724e3d6f57b11ba975d9243b4af9e95721397d795a2497 \
--hash=sha256:0c21feac9f7c850a457b1c707c3cc4f3b8f475a3c9120f8cec82ebc3b215b80a \
--hash=sha256:0fe6403a01e996a7247239691101148dc4071ccf7fe12b680d7b6c91a04aefbb \
--hash=sha256:1383412acd5356ff543c434723f2e7794c77e1ed4efc1062464cc2112c09af50 \
--hash=sha256:2acd18eeac4fcecef8c1b95d4ffaa606222aa1ba0d4372e829dc516b0504e6ef \
--hash=sha256:302bd7b3bc7281015cd6f975207755c534551d0a32c79147518f2de0459dbef4 \
--hash=sha256:390acfced0106fb12247e12c2aa399836e6686f5ba9daec332957ff830f215cd \
--hash=sha256:43ec51075547d498ec6e7952e459c3817e610d6e4ca68f4fa43a16ccea01d496 \
--hash=sha256:53f89a65d52d6fb56b2c5dd0445f30ca25852f344ba20de325ce6767dd842fca \
--hash=sha256:5f4f650b83b8085862de9e555d87f6053ca577b4070f4c6610a870116c4dd1f4 \
--hash=sha256:72ef2361d90a717457376351acb5b1b0c189a09dbd95adcb51907a96b79a6add \
--hash=sha256:7ef5c7ede8e8cda2a37c0ecab456f4cfae2c42049f51b24edb5303dbfe318ea6 \
--hash=sha256:86c9dee0fe6d4ea5bf394767929fdf5f924d161d9a6d23adcd58a690c5e160b0 \
--hash=sha256:8b587c9a17f4b050274d9b7f9284d5fae0a8d6a8021f88f779345593326bc33d \
--hash=sha256:91025801859a60b7761dea6a8b645f25be6d3639ef828423f094d90b3f60850e \
--hash=sha256:9d2940e2801cc768d2cb71e71dca3b025ca3737e9d1d0fad0c95b2e7db0c947a \
--hash=sha256:aa520b90eede823632013a319e91652d8226a6309a104cffdc7e00d5a2b5e66b \
--hash=sha256:b10fba39049595827f228e77e7b5070cb39c46466bf8fef51da73220a20cc717 \
--hash=sha256:c794b5c21485fb3232f5693995ba1a497267b1aecb70b218107cf131f8dc1d3d \
--hash=sha256:d05516bc197c5b7b2aa2f834ea7c5ee9fd9aa3034f4193cc05d899b18251aa9c \
--hash=sha256:d085c2c676f03357e5d6b11dbbf4e8c1b0d20b1066ac87e6cccc45d4b6c19675 \
--hash=sha256:dd40e26aaee67b9078618b0fce3d5f209e328852f2c72c6772cf6352f57d2ed1 \
--hash=sha256:e7b84c10ed30c1c997d81ef271945372fba9e18ac58d77a17d43fd9c42392ed4 \
--hash=sha256:e982d8af9618d45b25456f1f80e6d628295772d74d755f9a46b90711b7a56067 \
--hash=sha256:ef24c8ec97f93b2bdf1080553cdf38ea9ab195846b679cdcfe683c945ed2f1ee \
--hash=sha256:f46c5021c3663f82c2ff994295a8574638d56a831ca2a26d736d47fbcf4f9187

View File

@ -34,26 +34,12 @@ chmod +x /usr/local/bin/download-and-compress
# Create workspace
mkdir -p /builds/worker/workspace
# Install python-zstandard.
(
cd /setup
tooltool_fetch <<EOF
[
{
"size": 558068,
"visibility": "public",
"digest": "72b1fc542e5af36fc660d7b8d3882f0a25644d3b66316293717aabf9ba8cf578e49e2cf45e63e962c5535ec1f8b3e83248c379d34b0cab2ef1a950205ad153ce",
"algorithm": "sha512",
"filename": "zstandard-0.9.0.tar.gz"
}
]
EOF
)
# We need to install for both Python 2 and 3 because `mach taskcluster-load-image`
# uses Python 2 and `download-and-compress` uses Python 3.
/usr/bin/pip -v install /setup/zstandard-0.9.0.tar.gz
/usr/bin/pip3 -v install /setup/zstandard-0.9.0.tar.gz
# We also need to make sure to explicitly install python3-distutils so that it doesn't get purged later
apt-get install -y python3-distutils
/usr/bin/pip -v install -r /setup/requirements-py2.txt
/usr/bin/pip3 -v install -r /setup/requirements-py3.txt
# python-pip only needed to install python-zstandard. Removing it removes
# several hundred MB of dependencies from the image.

View File

@ -214,7 +214,12 @@ def fill_template(config, tasks):
# Force images built against the in-tree image builder to
# have a different digest by adding a fixed string to the
# hashed data.
# Append to this data whenever the image builder's output behavior
# is changed, in order to force all downstream images to be rebuilt and
# cached distinctly.
digest_data.append('image_builder')
# Updated for squashing images in Bug 1527394
digest_data.append('squashing layers')
worker['caches'] = [{
'type': 'persistent',

View File

@ -28,6 +28,7 @@ measure = fnbpaint, fcp, hero, dcf, ttfi, loadtime
hero = hero1
[raptor-tp6-facebook-firefox]
disabled = Bug 1506936
apps = firefox
test_url = https://www.facebook.com
playback_recordings = facebook.mp

View File

@ -108,7 +108,7 @@ def test_get_raptor_test_list_firefox(create_args):
args = create_args()
test_list = get_raptor_test_list(args, mozinfo.os)
assert len(test_list) == 4
assert len(test_list) == 3
subtests = ['raptor-tp6-google-firefox', 'raptor-tp6-amazon-firefox',
'raptor-tp6-facebook-firefox', 'raptor-tp6-youtube-firefox']

View File

@ -3,7 +3,7 @@
disabled:
if os == "mac": https://bugzilla.mozilla.org/show_bug.cgi?id=1526212
if os == "android" and not debug: https://bugzilla.mozilla.org/show_bug.cgi?id=1526212
if os == "windows" and debug: https://bugzilla.mozilla.org/show_bug.cgi?id=1526212
if os == "win" and debug: https://bugzilla.mozilla.org/show_bug.cgi?id=1526212
expected:
if not debug and not webrender and not e10s and (os == "android") and (version == "Ubuntu 16.04") and (processor == "x86") and (bits == 32): PASS
FAIL