Merge autoland to mozilla-central. a=merge

This commit is contained in:
Cosmin Sabou 2018-01-27 11:52:48 +02:00
commit 3f242e7301
176 changed files with 18689 additions and 1398 deletions

View File

@ -304,22 +304,12 @@ function promiseTabLoaded(aTab) {
* @param aShouldBeCleared
* True if each visit to the URI should be cleared, false otherwise
*/
function promiseHistoryClearedState(aURIs, aShouldBeCleared) {
return new Promise(resolve => {
let callbackCount = 0;
let niceStr = aShouldBeCleared ? "no longer" : "still";
function callbackDone() {
if (++callbackCount == aURIs.length)
resolve();
}
aURIs.forEach(function(aURI) {
PlacesUtils.asyncHistory.isURIVisited(aURI, function(uri, isVisited) {
is(isVisited, !aShouldBeCleared,
"history visit " + uri.spec + " should " + niceStr + " exist");
callbackDone();
});
});
});
/**
 * Asserts the visited state of a set of URIs in Places history.
 *
 * @param aURIs Array of URIs whose visited state should be checked.
 * @param aShouldBeCleared
 *        True if each visit to the URI should be cleared, false otherwise.
 */
async function promiseHistoryClearedState(aURIs, aShouldBeCleared) {
  // The expected visited state is the inverse of "cleared".
  const expectedVisited = !aShouldBeCleared;
  const stateWord = aShouldBeCleared ? "no longer" : "still";
  for (const uri of aURIs) {
    const visited = await PlacesUtils.history.hasVisits(uri);
    Assert.equal(visited, expectedVisited,
      `history visit ${uri.spec} should ${stateWord} exist`);
  }
}
var FullZoomHelper = {

View File

@ -412,7 +412,7 @@ async function GetWindowsPasswordsResource(aProfileFolder) {
return;
}
let crypto = new OSCrypto();
let logins = [];
for (let row of rows) {
try {
let origin_url = NetUtil.newURI(row.getResultByName("origin_url"));
@ -454,11 +454,18 @@ async function GetWindowsPasswordsResource(aProfileFolder) {
throw new Error("Login data scheme type not supported: " +
row.getResultByName("scheme"));
}
MigrationUtils.insertLoginWrapper(loginInfo);
logins.push(loginInfo);
} catch (e) {
Cu.reportError(e);
}
}
try {
if (logins.length > 0) {
await MigrationUtils.insertLoginsWrapper(logins);
}
} catch (e) {
Cu.reportError(e);
}
crypto.finalize();
aCallback(true);
},

View File

@ -1058,6 +1058,20 @@ this.MigrationUtils = Object.freeze({
}
},
async insertLoginsWrapper(logins) {
this._importQuantities.logins += logins.length;
let inserted = await LoginHelper.maybeImportLogins(logins);
// Note that this means that if we import a login that has a newer password
// than we know about, we will update the login, and an undo of the import
// will not revert this. This seems preferable over removing the login
// outright or storing the old password in the undo file.
if (gKeepUndoData) {
for (let {guid, timePasswordChanged} of inserted) {
gUndoData.get("logins").push({guid, timePasswordChanged});
}
}
},
initializeUndoData() {
gKeepUndoData = true;
gUndoData = new Map([["bookmarks", []], ["visits", []], ["logins", []]]);

View File

@ -142,18 +142,6 @@ function synthesizeClickOnSelectedTreeCell(aTree, aOptions) {
aTree.ownerGlobal);
}
/**
* Asynchronously check a url is visited.
*
* @param aURI The URI.
* @return {Promise}
* @resolves When the check has been added successfully.
* @rejects JavaScript exception.
*/
function promiseIsURIVisited(aURI) {
  // hasVisits() already returns a promise; simply forward it to the caller.
  const visitedPromise = PlacesUtils.history.hasVisits(aURI);
  return visitedPromise;
}
/**
* Makes the specified toolbar visible or invisible and returns a Promise object
* that is resolved when the toolbar has completed any animations associated

View File

@ -672,12 +672,10 @@
&certmgr.tab.others2;
&certmgr.tab.websites3;
&certmgr.tab.ca;
&certmgr.tab.orphan2;
&certmgr.mine2;
&certmgr.others2;
&certmgr.websites3;
&certmgr.cas2;
&certmgr.orphans2;
&certmgr.certname;
&certmgr.tokenname;
&certmgr.view2.label;

View File

@ -320,7 +320,7 @@ client should remember what the expiration events were for an experiment
and honor them.
The rationale here is that we want to prevent an accidental deletion
or temporary failure on the server to inadvertantly deactivate
or temporary failure on the server to inadvertently deactivate
supposed-to-be-active experiments. We also don't want premature deletion
of an experiment from the manifest to result in indefinite activation
periods.

View File

@ -275,7 +275,7 @@ In some cases, for convenience, it is possible to set both
This allows using ``mylib`` in the ``USE_LIBS`` of another library or
executable.
When refering to a ``Library`` name building both types of libraries in
When referring to a ``Library`` name building both types of libraries in
``USE_LIBS``, the shared library is chosen to be linked. But sometimes,
it is wanted to link the static version, in which case the ``Library`` name
needs to be prefixed with ``static:`` in ``USE_LIBS``

View File

@ -32,7 +32,7 @@ Glossary
A large part of the build system consists of copying files
around to appropriate places. We write out special files
describing the set of required operations so we can process the
actions effeciently. These files are install manifests.
actions efficiently. These files are install manifests.
clobber build
A build performed with an initially empty object directory. All

View File

@ -54,7 +54,7 @@ bits
Universal Mac builds do not have this key defined.
Unkown processor architectures (see ``processor`` below) may not have
Unknown processor architectures (see ``processor`` below) may not have
this key defined.
Optional.

View File

@ -17,5 +17,7 @@ unset MAKECAB
unset TOOLCHAIN_PREFIX
unset BINDGEN_CFLAGS
unset LLVM_CONFIG
unset WIN64_LINK
unset WIN64_LIB
unset MOZ_STDCXX_COMPAT

View File

@ -8580,12 +8580,6 @@ nsDocument::Sanitize()
}
}
struct SubDocEnumArgs
{
nsIDocument::nsSubDocEnumFunc callback;
void *data;
};
void
nsDocument::EnumerateSubDocuments(nsSubDocEnumFunc aCallback, void *aData)
{
@ -8610,6 +8604,27 @@ nsDocument::EnumerateSubDocuments(nsSubDocEnumFunc aCallback, void *aData)
}
}
// Walks the subdocument map depth-first and appends every descendant
// document for which |aCallback| returns true to |aDescendants|.
// The callback must not mutate document state.
void
nsDocument::CollectDescendantDocuments(
  nsTArray<nsCOMPtr<nsIDocument>>& aDescendants,
  nsDocTestFunc aCallback) const
{
  if (!mSubDocuments) {
    // No subdocument map yet; nothing to collect.
    return;
  }

  for (auto iter = mSubDocuments->Iter(); !iter.Done(); iter.Next()) {
    auto entry = static_cast<SubDocMapEntry*>(iter.Get());
    const nsIDocument* subdoc = entry->mSubDocument;
    if (!subdoc) {
      continue;
    }
    if (aCallback(subdoc)) {
      aDescendants.AppendElement(entry->mSubDocument);
    }
    // Recurse so grandchildren (and deeper) are also considered.
    subdoc->CollectDescendantDocuments(aDescendants, aCallback);
  }
}
#ifdef DEBUG_bryner
#define DEBUG_PAGE_CACHE
#endif

View File

@ -620,6 +620,10 @@ public:
virtual void UpdateIntersectionObservations() override;
virtual void ScheduleIntersectionObserverNotification() override;
virtual void NotifyIntersectionObservers() override;
virtual bool HasIntersectionObservers() const override
{
return !mIntersectionObservers.IsEmpty();
}
virtual void NotifyLayerManagerRecreated() override;
@ -678,6 +682,9 @@ public:
virtual void EnumerateSubDocuments(nsSubDocEnumFunc aCallback,
void *aData) override;
virtual void CollectDescendantDocuments(
nsTArray<nsCOMPtr<nsIDocument>>& aDescendants,
nsDocTestFunc aCallback) const override;
virtual bool CanSavePresentation(nsIRequest *aNewRequest) override;
virtual void Destroy() override;

View File

@ -1916,6 +1916,15 @@ public:
virtual void EnumerateSubDocuments(nsSubDocEnumFunc aCallback,
void *aData) = 0;
/**
* Collect all the descendant documents for which |aCalback| returns true.
* The callback function must not mutate any state for the given document.
*/
typedef bool (*nsDocTestFunc)(const nsIDocument* aDocument);
virtual void CollectDescendantDocuments(
nsTArray<nsCOMPtr<nsIDocument>>& aDescendants,
nsDocTestFunc aCallback) const = 0;
/**
* Check whether it is safe to cache the presentation of this document
* and all of its subdocuments. This method checks the following conditions
@ -3111,6 +3120,7 @@ public:
virtual void UpdateIntersectionObservations() = 0;
virtual void ScheduleIntersectionObserverNotification() = 0;
virtual void NotifyIntersectionObservers() = 0;
virtual bool HasIntersectionObservers() const = 0;
// Dispatch a runnable related to the document.
virtual nsresult Dispatch(mozilla::TaskCategory aCategory,

View File

@ -666,8 +666,7 @@ HTMLEditor::HandleKeyPressEvent(WidgetKeyboardEvent* aKeyboardEvent)
return TypedText(NS_LITERAL_STRING("\t"), eTypedText);
}
case NS_VK_RETURN:
if (aKeyboardEvent->IsControl() || aKeyboardEvent->IsAlt() ||
aKeyboardEvent->IsMeta() || aKeyboardEvent->IsOS()) {
if (!aKeyboardEvent->IsInputtingLineBreak()) {
return NS_OK;
}
aKeyboardEvent->PreventDefault(); // consumed
@ -679,11 +678,7 @@ HTMLEditor::HandleKeyPressEvent(WidgetKeyboardEvent* aKeyboardEvent)
return TypedText(EmptyString(), eTypedBreak);
}
// NOTE: On some keyboard layout, some characters are inputted with Control
// key or Alt key, but at that time, widget sets FALSE to these keys.
if (!aKeyboardEvent->mCharCode || aKeyboardEvent->IsControl() ||
aKeyboardEvent->IsAlt() || aKeyboardEvent->IsMeta() ||
aKeyboardEvent->IsOS()) {
if (!aKeyboardEvent->IsInputtingText()) {
// we don't PreventDefault() here or keybindings like control-x won't work
return NS_OK;
}

View File

@ -375,20 +375,14 @@ TextEditor::HandleKeyPressEvent(WidgetKeyboardEvent* aKeyboardEvent)
return TypedText(NS_LITERAL_STRING("\t"), eTypedText);
}
case NS_VK_RETURN:
if (IsSingleLineEditor() || aKeyboardEvent->IsControl() ||
aKeyboardEvent->IsAlt() || aKeyboardEvent->IsMeta() ||
aKeyboardEvent->IsOS()) {
if (IsSingleLineEditor() || !aKeyboardEvent->IsInputtingLineBreak()) {
return NS_OK;
}
aKeyboardEvent->PreventDefault();
return TypedText(EmptyString(), eTypedBreak);
}
// NOTE: On some keyboard layout, some characters are inputted with Control
// key or Alt key, but at that time, widget sets FALSE to these keys.
if (!aKeyboardEvent->mCharCode || aKeyboardEvent->IsControl() ||
aKeyboardEvent->IsAlt() || aKeyboardEvent->IsMeta() ||
aKeyboardEvent->IsOS()) {
if (!aKeyboardEvent->IsInputtingText()) {
// we don't PreventDefault() here or keybindings like control-x won't work
return NS_OK;
}

View File

@ -48,10 +48,12 @@ Option B:
roughly:
- Update your mozilla-central checkout to the latest code on mozilla-central.
- Check out and update the webrender repo to the version you want
- Copy over the webrender and webrender_api folders into gfx/. The best way
to do this is to simply delete the gfx/webrender and gfx/webrender_api
folders and use |cp -R| to copy them in again from the webrender repo. Update
the "latest commit" information at the bottom of this file with the version.
- Copy over the webrender, webrender_api, and part of the wrench folders into
gfx/. The best way to do this is to simply delete the gfx/webrender,
gfx/webrender_api, and gfx/wrench folders and use |cp -R| to copy them in
again from the webrender repo, and then delete the gfx/wrench/reftests,
gfx/wrench/benchmarks, and gfx/wrench/script folders. Update the "latest
commit" information at the bottom of this file with the version.
- If you need to modify webrender_bindings/Cargo.toml file, do so now. Changes
at this step usually consist of:
(a) Updating version numbers. Go through the version numbers of ALL the
@ -175,4 +177,4 @@ Troubleshooting tips:
-------------------------------------------------------------------------------
The version of WebRender currently in the tree is:
c0943271eb8c6440a61db37e2f1e84201dcac2e3
1d8157c71f88d5c673f5d084f02515ab74263814

View File

@ -36,7 +36,7 @@ serde_json = { optional = true, version = "1.0" }
serde = { optional = true, version = "1.0", features = ["serde_derive"] }
image = { optional = true, version = "0.17" }
base64 = { optional = true, version = "0.3.0" }
ron = { optional = true, version = "0.1.5" }
ron = { optional = true, version = "0.1.7" }
[dev-dependencies]
angle = {git = "https://github.com/servo/angle", branch = "servo"}

View File

@ -6,6 +6,7 @@
varying vec3 vUv;
flat varying vec4 vColor;
flat varying vec4 vStRect;
#ifdef WR_VERTEX_SHADER
// Draw a text run to a cache target. These are always
@ -41,12 +42,18 @@ void main(void) {
vUv = vec3(mix(st0, st1, aPosition.xy), res.layer);
vColor = prim.task.color;
// We clamp the texture coordinates to the half-pixel offset from the borders
// in order to avoid sampling outside of the texture area.
vec2 half_texel = vec2(0.5) / texture_size;
vStRect = vec4(min(st0, st1) + half_texel, max(st0, st1) - half_texel);
}
#endif
#ifdef WR_FRAGMENT_SHADER
void main(void) {
float a = texture(sColor0, vUv).a;
vec2 uv = clamp(vUv.xy, vStRect.xy, vStRect.zw);
float a = texture(sColor0, vec3(uv, vUv.z)).a;
oFragColor = vColor * a;
}
#endif

View File

@ -153,6 +153,7 @@ void main(void) {
vec4 edge_distances;
vec4 color0, color1;
vec2 color_delta;
vec4 edge_mask;
// TODO(gw): Now that all border styles are supported, the switch
// statement below can be tidied up quite a bit.
@ -180,6 +181,7 @@ void main(void) {
color_delta = vec2(1.0);
vIsBorderRadiusLessThanBorderWidth = any(lessThan(border.radii[0].xy,
border.widths.xy)) ? 1.0 : 0.0;
edge_mask = vec4(1.0, 1.0, 0.0, 0.0);
break;
}
case 1: {
@ -206,6 +208,7 @@ void main(void) {
color_delta = vec2(1.0, -1.0);
vIsBorderRadiusLessThanBorderWidth = any(lessThan(border.radii[0].zw,
border.widths.zy)) ? 1.0 : 0.0;
edge_mask = vec4(0.0, 1.0, 1.0, 0.0);
break;
}
case 2: {
@ -232,6 +235,7 @@ void main(void) {
color_delta = vec2(-1.0);
vIsBorderRadiusLessThanBorderWidth = any(lessThan(border.radii[1].xy,
border.widths.zw)) ? 1.0 : 0.0;
edge_mask = vec4(0.0, 0.0, 1.0, 1.0);
break;
}
case 3: {
@ -258,6 +262,7 @@ void main(void) {
color_delta = vec2(-1.0, 1.0);
vIsBorderRadiusLessThanBorderWidth = any(lessThan(border.radii[1].zw,
border.widths.xw)) ? 1.0 : 0.0;
edge_mask = vec4(1.0, 0.0, 0.0, 1.0);
break;
}
}
@ -301,7 +306,7 @@ void main(void) {
VertexInfo vi = write_transform_vertex(segment_rect,
prim.local_rect,
prim.local_clip_rect,
vec4(1.0),
edge_mask,
prim.z,
prim.scroll_node,
prim.task);

View File

@ -152,6 +152,8 @@ void main(void) {
bool color_flip;
RectWithSize segment_rect;
vec4 edge_mask;
switch (sub_part) {
case 0: {
segment_rect.p0 = vec2(corners.tl_outer.x, corners.tl_inner.y);
@ -165,6 +167,7 @@ void main(void) {
segment_rect.size.y,
segment_rect.p0.y,
segment_rect.p0.x + 0.5 * segment_rect.size.x);
edge_mask = vec4(1.0, 0.0, 1.0, 0.0);
break;
}
case 1: {
@ -179,6 +182,7 @@ void main(void) {
segment_rect.size.x,
segment_rect.p0.x,
segment_rect.p0.y + 0.5 * segment_rect.size.y);
edge_mask = vec4(0.0, 1.0, 0.0, 1.0);
break;
}
case 2: {
@ -193,6 +197,7 @@ void main(void) {
segment_rect.size.y,
segment_rect.p0.y,
segment_rect.p0.x + 0.5 * segment_rect.size.x);
edge_mask = vec4(1.0, 0.0, 1.0, 0.0);
break;
}
case 3: {
@ -207,6 +212,7 @@ void main(void) {
segment_rect.size.x,
segment_rect.p0.x,
segment_rect.p0.y + 0.5 * segment_rect.size.y);
edge_mask = vec4(0.0, 1.0, 0.0, 1.0);
break;
}
}
@ -219,7 +225,7 @@ void main(void) {
VertexInfo vi = write_transform_vertex(segment_rect,
prim.local_rect,
prim.local_clip_rect,
vec4(1.0),
edge_mask,
prim.z,
prim.scroll_node,
prim.task);

View File

@ -24,7 +24,10 @@ impl CaptureConfig {
CaptureConfig {
root,
bits,
pretty: ser::PrettyConfig::default(),
pretty: ser::PrettyConfig {
enumerate_arrays: true,
.. ser::PrettyConfig::default()
},
}
}

View File

@ -1793,7 +1793,7 @@ impl FrameBuilder {
}
}
let gpu_cache_updates = gpu_cache.end_frame(gpu_cache_profile);
let gpu_cache_frame_id = gpu_cache.end_frame(gpu_cache_profile);
render_tasks.build();
@ -1811,7 +1811,7 @@ impl FrameBuilder {
clip_chain_local_clip_rects,
render_tasks,
deferred_resolves,
gpu_cache_updates: Some(gpu_cache_updates),
gpu_cache_frame_id,
has_been_rendered: false,
has_texture_cache_tasks,
}

View File

@ -221,15 +221,18 @@ pub enum GpuCacheUpdate {
},
}
#[must_use]
#[cfg_attr(feature = "capture", derive(Deserialize, Serialize))]
pub struct GpuCacheUpdateList {
// The current height of the texture. The render thread
// should resize the texture if required.
/// The frame current update list was generated from.
pub frame_id: FrameId,
/// The current height of the texture. The render thread
/// should resize the texture if required.
pub height: u32,
// List of updates to apply.
/// List of updates to apply.
pub updates: Vec<GpuCacheUpdate>,
// A flat list of GPU blocks that are pending upload
// to GPU memory.
/// A flat list of GPU blocks that are pending upload
/// to GPU memory.
pub blocks: Vec<GpuBlockData>,
}
@ -595,9 +598,9 @@ impl GpuCache {
/// End the frame. Return the list of updates to apply to the
/// device specific cache texture.
pub fn end_frame(
&mut self,
&self,
profile_counters: &mut GpuCacheProfileCounters,
) -> GpuCacheUpdateList {
) -> FrameId {
profile_counters
.allocated_rows
.set(self.texture.rows.len());
@ -607,8 +610,13 @@ impl GpuCache {
profile_counters
.saved_blocks
.set(self.saved_block_count);
self.frame_id
}
/// Extract the pending updates from the cache.
pub fn extract_updates(&mut self) -> GpuCacheUpdateList {
GpuCacheUpdateList {
frame_id: self.frame_id,
height: self.texture.height,
updates: mem::replace(&mut self.texture.updates, Vec::new()),
blocks: mem::replace(&mut self.texture.pending_blocks, Vec::new()),

View File

@ -7,6 +7,7 @@ use api::{ExternalImageData, ExternalImageId};
use api::{ImageFormat, PipelineId};
use api::DebugCommand;
use device::TextureFilter;
use gpu_cache::GpuCacheUpdateList;
use fxhash::FxHasher;
use profiler::BackendProfileCounters;
use std::{usize, i32};
@ -161,14 +162,15 @@ pub enum ResultMsg {
DebugCommand(DebugCommand),
DebugOutput(DebugOutput),
RefreshShader(PathBuf),
UpdateGpuCache(GpuCacheUpdateList),
UpdateResources {
updates: TextureUpdateList,
cancel_rendering: bool,
},
PublishDocument(
DocumentId,
RenderedDocument,
TextureUpdateList,
BackendProfileCounters,
),
UpdateResources {
updates: TextureUpdateList,
cancel_rendering: bool,
},
}

View File

@ -6,9 +6,9 @@ use api::{ApiMsg, BlobImageRenderer, BuiltDisplayList, DebugCommand, DeviceIntPo
#[cfg(feature = "debugger")]
use api::{BuiltDisplayListIter, SpecificDisplayItem};
use api::{DevicePixelScale, DeviceUintPoint, DeviceUintRect, DeviceUintSize};
use api::{DocumentId, DocumentLayer, DocumentMsg};
use api::{IdNamespace, PipelineId, RenderNotifier};
use api::channel::{MsgReceiver, PayloadReceiver, PayloadReceiverHelperMethods};
use api::{DocumentId, DocumentLayer, DocumentMsg, HitTestFlags, HitTestResult};
use api::{IdNamespace, PipelineId, RenderNotifier, WorldPoint};
use api::channel::{MsgReceiver, MsgSender, PayloadReceiver, PayloadReceiverHelperMethods};
use api::channel::{PayloadSender, PayloadSenderHelperMethods};
#[cfg(feature = "capture")]
use api::CapturedDocument;
@ -20,7 +20,7 @@ use frame::FrameContext;
use frame_builder::{FrameBuilder, FrameBuilderConfig};
use gpu_cache::GpuCache;
use internal_types::{DebugOutput, FastHashMap, FastHashSet, RenderedDocument, ResultMsg};
use profiler::{BackendProfileCounters, ResourceProfileCounters};
use profiler::{BackendProfileCounters, IpcProfileCounters, ResourceProfileCounters};
use rayon::ThreadPool;
use record::ApiRecordingReceiver;
use resource_cache::ResourceCache;
@ -153,10 +153,13 @@ impl Document {
}
}
type HitTestQuery = (Option<PipelineId>, WorldPoint, HitTestFlags, MsgSender<HitTestResult>);
struct DocumentOps {
scroll: bool,
build: bool,
render: bool,
queries: Vec<HitTestQuery>,
}
impl DocumentOps {
@ -165,6 +168,7 @@ impl DocumentOps {
scroll: false,
build: false,
render: false,
queries: vec![],
}
}
@ -175,10 +179,11 @@ impl DocumentOps {
}
}
fn combine(&mut self, other: Self) {
fn combine(&mut self, mut other: Self) {
self.scroll = self.scroll || other.scroll;
self.build = self.build || other.build;
self.render = self.render || other.render;
self.queries.extend(other.queries.drain(..));
}
}
@ -260,7 +265,8 @@ impl RenderBackend {
document_id: DocumentId,
message: DocumentMsg,
frame_counter: u32,
profile_counters: &mut BackendProfileCounters,
ipc_profile_counters: &mut IpcProfileCounters,
resource_profile_counters: &mut ResourceProfileCounters,
) -> DocumentOps {
let doc = self.documents.get_mut(&document_id).expect("No document?");
@ -332,7 +338,6 @@ impl RenderBackend {
let display_list_received_time = precise_time_ns();
{
let _timer = profile_counters.total_time.timer();
doc.scene.set_display_list(
pipeline_id,
epoch,
@ -352,7 +357,7 @@ impl RenderBackend {
// really simple and cheap to access, so it's not a big deal.
let display_list_consumed_time = precise_time_ns();
profile_counters.ipc.set(
ipc_profile_counters.set(
builder_start_time,
builder_finish_time,
send_start_time,
@ -368,7 +373,7 @@ impl RenderBackend {
self.resource_cache.update_resources(
updates,
&mut profile_counters.resources
resource_profile_counters
);
DocumentOps::nop()
@ -396,59 +401,44 @@ impl RenderBackend {
}
DocumentMsg::Scroll(delta, cursor, move_phase) => {
profile_scope!("Scroll");
let _timer = profile_counters.total_time.timer();
let should_render = doc.frame_ctx.scroll(delta, cursor, move_phase)
&& doc.render_on_scroll == Some(true);
DocumentOps {
scroll: true,
build: false,
render: should_render,
..DocumentOps::nop()
}
}
DocumentMsg::HitTest(pipeline_id, point, flags, tx) => {
profile_scope!("HitTest");
if doc.render_on_hittest {
doc.render(
&mut self.resource_cache,
&mut self.gpu_cache,
&mut profile_counters.resources,
);
doc.render_on_hittest = false;
DocumentOps {
render: doc.render_on_hittest,
queries: vec![(pipeline_id, point, flags, tx)],
..DocumentOps::nop()
}
let cst = doc.frame_ctx.get_clip_scroll_tree();
let result = doc.frame_builder
.as_ref()
.unwrap()
.hit_test(cst, pipeline_id, point, flags);
tx.send(result).unwrap();
DocumentOps::nop()
}
DocumentMsg::ScrollNodeWithId(origin, id, clamp) => {
profile_scope!("ScrollNodeWithScrollId");
let _timer = profile_counters.total_time.timer();
let should_render = doc.frame_ctx.scroll_node(origin, id, clamp)
&& doc.render_on_scroll == Some(true);
DocumentOps {
scroll: true,
build: false,
render: should_render,
..DocumentOps::nop()
}
}
DocumentMsg::TickScrollingBounce => {
profile_scope!("TickScrollingBounce");
let _timer = profile_counters.total_time.timer();
doc.frame_ctx.tick_scrolling_bounce_animations();
DocumentOps {
scroll: true,
build: false,
render: doc.render_on_scroll == Some(true),
..DocumentOps::nop()
}
}
DocumentMsg::GetScrollNodeState(tx) => {
@ -471,8 +461,6 @@ impl RenderBackend {
DocumentOps::build()
}
DocumentMsg::GenerateFrame => {
let _timer = profile_counters.total_time.timer();
let mut op = DocumentOps::nop();
if let Some(ref mut ros) = doc.render_on_scroll {
@ -656,12 +644,14 @@ impl RenderBackend {
) {
let mut op = DocumentOps::nop();
for doc_msg in doc_msgs {
let _timer = profile_counters.total_time.timer();
op.combine(
self.process_document(
document_id,
doc_msg,
*frame_counter,
profile_counters,
&mut profile_counters.ipc,
&mut profile_counters.resources,
)
);
}
@ -669,6 +659,7 @@ impl RenderBackend {
let doc = self.documents.get_mut(&document_id).unwrap();
if op.build {
let _timer = profile_counters.total_time.timer();
profile_scope!("build scene");
doc.build_scene(&mut self.resource_cache);
doc.render_on_hittest = true;
@ -678,17 +669,28 @@ impl RenderBackend {
profile_scope!("generate frame");
*frame_counter += 1;
let rendered_document = doc.render(
&mut self.resource_cache,
&mut self.gpu_cache,
&mut profile_counters.resources,
);
info!("generated frame for document {:?} with {} passes",
document_id, rendered_document.frame.passes.len());
// borrow ck hack for profile_counters
let (pending_update, rendered_document) = {
let _timer = profile_counters.total_time.timer();
let rendered_document = doc.render(
&mut self.resource_cache,
&mut self.gpu_cache,
&mut profile_counters.resources,
);
info!("generated frame for document {:?} with {} passes",
document_id, rendered_document.frame.passes.len());
let msg = ResultMsg::UpdateGpuCache(self.gpu_cache.extract_updates());
self.result_tx.send(msg).unwrap();
let pending_update = self.resource_cache.pending_updates();
(pending_update, rendered_document)
};
// Publish the frame
let pending_update = self.resource_cache.pending_updates();
let msg = ResultMsg::PublishDocument(
document_id,
rendered_document,
@ -703,6 +705,16 @@ impl RenderBackend {
if op.render || op.scroll {
self.notifier.new_document_ready(document_id, op.scroll, op.render);
}
for (pipeline_id, point, flags, tx) in op.queries {
profile_scope!("HitTest");
let cst = doc.frame_ctx.get_clip_scroll_tree();
let result = doc.frame_builder
.as_ref()
.unwrap()
.hit_test(cst, pipeline_id, point, flags);
tx.send(result).unwrap();
}
}
#[cfg(not(feature = "debugger"))]

View File

@ -1656,6 +1656,8 @@ pub struct Renderer {
render_task_texture: VertexDataTexture,
gpu_cache_texture: CacheTexture,
gpu_cache_frame_id: FrameId,
pipeline_epoch_map: FastHashMap<PipelineId, Epoch>,
// Manages and resolves source textures IDs to real texture IDs.
@ -2313,6 +2315,7 @@ impl Renderer {
cpu_profiles: VecDeque::new(),
gpu_profiles: VecDeque::new(),
gpu_cache_texture,
gpu_cache_frame_id: FrameId::new(0),
texture_cache_upload_pbo,
texture_resolver,
renderer_errors: Vec::new(),
@ -2368,19 +2371,17 @@ impl Renderer {
/// Should be called before `render()`, as texture cache updates are done here.
pub fn update(&mut self) {
profile_scope!("update");
// Pull any pending results and return the most recent.
while let Ok(msg) = self.result_rx.try_recv() {
match msg {
ResultMsg::PublishDocument(
document_id,
mut doc,
doc,
texture_update_list,
profile_counters,
) => {
//TODO: associate `document_id` with target window
self.pending_texture_updates.push(texture_update_list);
self.pending_gpu_cache_updates.extend(doc.frame.gpu_cache_updates.take());
self.backend_profile_counters = profile_counters;
// Update the list of available epochs for use during reftests.
@ -2404,6 +2405,9 @@ impl Renderer {
None => self.active_documents.push((document_id, doc)),
}
}
ResultMsg::UpdateGpuCache(list) => {
self.pending_gpu_cache_updates.push(list);
}
ResultMsg::UpdateResources {
updates,
cancel_rendering,
@ -2753,7 +2757,6 @@ impl Renderer {
framebuffer_size: Option<DeviceUintSize>
) -> Result<RendererStats, Vec<RendererError>> {
profile_scope!("render");
if self.active_documents.is_empty() {
self.last_time = precise_time_ns();
return Ok(RendererStats::empty());
@ -2844,6 +2847,7 @@ impl Renderer {
for &mut (_, RenderedDocument { ref mut frame, .. }) in &mut active_documents {
self.prepare_gpu_cache(frame);
assert!(frame.gpu_cache_frame_id <= self.gpu_cache_frame_id);
self.draw_tile_frame(
frame,
@ -2896,6 +2900,7 @@ impl Renderer {
}
}
self.backend_profile_counters.reset();
self.profile_counters.reset();
self.profile_counters.frame_counter.inc();
@ -2931,6 +2936,7 @@ impl Renderer {
let gpu_cache_height = self.gpu_cache_texture.get_height();
if gpu_cache_height != 0 && GPU_CACHE_RESIZE_TEST {
self.pending_gpu_cache_updates.push(GpuCacheUpdateList {
frame_id: FrameId::new(0),
height: gpu_cache_height,
blocks: vec![[1f32; 4].into()],
updates: Vec::new(),
@ -2955,6 +2961,9 @@ impl Renderer {
for update_list in self.pending_gpu_cache_updates.drain(..) {
assert!(update_list.height <= max_requested_height);
if update_list.frame_id > self.gpu_cache_frame_id {
self.gpu_cache_frame_id = update_list.frame_id
}
self.gpu_cache_texture
.update(&mut self.device, &update_list);
}
@ -4089,6 +4098,7 @@ impl Renderer {
.expect("Found external image, but no handler set!");
let mut list = GpuCacheUpdateList {
frame_id: FrameId::new(0),
height: self.gpu_cache_texture.get_height(),
blocks: Vec::new(),
updates: Vec::new(),

View File

@ -9,8 +9,8 @@ use api::{LayerRect, MixBlendMode, PipelineId};
use batch::{AlphaBatcher, ClipBatcher, resolve_image};
use clip::{ClipStore};
use clip_scroll_tree::{ClipScrollTree};
use device::Texture;
use gpu_cache::{GpuCache, GpuCacheUpdateList};
use device::{FrameId, Texture};
use gpu_cache::{GpuCache};
use gpu_types::{BlurDirection, BlurInstance, BrushInstance, ClipChainRectIndex};
use gpu_types::{ClipScrollNodeData, ClipScrollNodeIndex};
use gpu_types::{PrimitiveInstance};
@ -861,6 +861,7 @@ impl CompositeOps {
/// and presented to the renderer.
#[cfg_attr(feature = "capture", derive(Deserialize, Serialize))]
pub struct Frame {
//TODO: share the fields with DocumentView struct
pub window_size: DeviceUintSize,
pub inner_rect: DeviceUintRect,
pub background_color: Option<ColorF>,
@ -874,22 +875,21 @@ pub struct Frame {
pub clip_chain_local_clip_rects: Vec<LayerRect>,
pub render_tasks: RenderTaskTree,
// List of updates that need to be pushed to the
// gpu resource cache.
pub gpu_cache_updates: Option<GpuCacheUpdateList>,
/// The GPU cache frame that the contents of Self depend on
pub gpu_cache_frame_id: FrameId,
// List of textures that we don't know about yet
// from the backend thread. The render thread
// will use a callback to resolve these and
// patch the data structures.
/// List of textures that we don't know about yet
/// from the backend thread. The render thread
/// will use a callback to resolve these and
/// patch the data structures.
pub deferred_resolves: Vec<DeferredResolve>,
// True if this frame contains any render tasks
// that write to the texture cache.
/// True if this frame contains any render tasks
/// that write to the texture cache.
pub has_texture_cache_tasks: bool,
// True if this frame has been drawn by the
// renderer.
/// True if this frame has been drawn by the
/// renderer.
pub has_been_rendered: bool,
}

7
gfx/wrench/.gitignore vendored Normal file
View File

@ -0,0 +1,7 @@
Cargo.lock
target/
*#
*~
yaml_frames/
json_frames/
bin_frames/

44
gfx/wrench/Cargo.toml Normal file
View File

@ -0,0 +1,44 @@
[package]
name = "wrench"
version = "0.2.6"
authors = ["Vladimir Vukicevic <vladimir@pobox.com>"]
build = "build.rs"
license = "MPL-2.0"
[dependencies]
base64 = "0.3"
bincode = "0.9"
byteorder = "1.0"
env_logger = { version = "0.4", optional = true }
euclid = "0.16"
gleam = "0.4"
servo-glutin = "0.13"
app_units = "0.6"
image = "0.17"
clap = { version = "2", features = ["yaml"] }
lazy_static = "1"
log = "0.3"
yaml-rust = { git = "https://github.com/vvuk/yaml-rust", features = ["preserve_order"] }
serde_json = "1.0"
ron = "0.1.5"
time = "0.1"
crossbeam = "0.2"
osmesa-sys = { version = "0.1.2", optional = true }
osmesa-src = { git = "https://github.com/servo/osmesa-src", optional = true }
webrender = {path = "../webrender", features=["capture","debugger","png","profiler"]}
webrender_api = {path = "../webrender_api", features=["debug-serialization"]}
serde = {version = "1.0", features = ["derive"] }
[target.'cfg(target_os = "macos")'.dependencies]
core-graphics = "0.12.4"
core-foundation = "0.4"
[features]
headless = [ "osmesa-sys", "osmesa-src" ]
logging = [ "env_logger" ]
[target.'cfg(target_os = "windows")'.dependencies]
dwrote = "0.4.1"
[target.'cfg(any(target_os = "linux", target_os = "macos"))'.dependencies]
font-loader = "0.5"

35
gfx/wrench/README.md Normal file
View File

@ -0,0 +1,35 @@
# wrench
`wrench` is a tool for debugging webrender outside of a browser engine.
## headless
`wrench` has an optional headless mode for use in continuous integration. To run in headless mode, instead of using `cargo run -- args`, use `./headless.py args`.
## `replay` and `show`
Binary recordings can be generated by webrender and replayed with `wrench replay`. Enable binary recording in `RendererOptions`.
```rust
RendererOptions {
...
recorder: Some(Box::new(BinaryRecorder::new("wr-frame.bin"))),
...
}
```
If you are working on gecko integration you can enable recording in `webrender_bindings/src/bindings.rs` by setting
```rust
static ENABLE_RECORDING: bool = true;
```
`wrench replay --save yaml` will convert the recording into frames described in yaml. Frames can then be replayed with `wrench show`.
## `reftest`
Wrench also has a reftest system for catching regressions.
* To run all reftests, run `./headless.py reftest`
* To run specific reftests, run `./headless.py reftest path/to/test/or/dir`
* To examine test failures, use the [reftest analyzer](https://hg.mozilla.org/mozilla-central/raw-file/tip/layout/tools/reftest/reftest-analyzer.xhtml)
* To add a new reftest, create an example frame and a reference frame in `reftests/` and then add an entry to `reftests/reftest.list`

28
gfx/wrench/build.rs Normal file
View File

@ -0,0 +1,28 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use std::env;
use std::fs;
use std::path::PathBuf;
/// Build script: on Windows targets, copy the application manifest next to
/// the wrench executable so the OS picks it up at launch.
fn main() {
    // TARGET and OUT_DIR are always provided by cargo to build scripts.
    let target = env::var("TARGET").unwrap();
    let build_dir = PathBuf::from(env::var_os("OUT_DIR").unwrap());
    // Re-run this script whenever the manifest file itself changes.
    println!("cargo:rerun-if-changed=res/wrench.exe.manifest");
    if !target.contains("windows") {
        return;
    }
    // OUT_DIR is target/<profile>/build/<pkg>-<hash>/out; the executable
    // ends up in target/<profile>, three ancestors up.
    let exe_dir = build_dir
        .parent()
        .and_then(|p| p.parent())
        .and_then(|p| p.parent())
        .unwrap();
    fs::copy("res/wrench.exe.manifest", exe_dir.join("wrench.exe.manifest")).unwrap();
}

View File

@ -0,0 +1,24 @@
<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
<assembly xmlns="urn:schemas-microsoft-com:asm.v1"
manifestVersion="1.0"
xmlns:asmv3="urn:schemas-microsoft-com:asm.v3">
<assemblyIdentity type="win32"
name="webrender.Wrench"
version="0.1.0.0"/>
<compatibility xmlns="urn:schemas-microsoft-com:compatibility.v1">
<application>
<supportedOS Id="{35138b9a-5d96-4fbd-8e2d-a2440225f93a}"/> <!-- Windows 7 -->
<supportedOS Id="{4a2f28e3-53b9-4441-ba9c-d69d4a4a6e38}"/> <!-- Windows 8 -->
<supportedOS Id="{1f676c76-80e1-4239-95bb-83d0f6d0da78}"/> <!-- Windows 8.1 -->
<supportedOS Id="{8e0f7a12-bfb3-4fe8-b9a5-48fd50a15a9a}"/> <!-- Windows 10 -->
</application>
</compatibility>
<asmv3:application>
<asmv3:windowsSettings xmlns="http://schemas.microsoft.com/SMI/2005/WindowsSettings">
<dpiAware>true</dpiAware>
</asmv3:windowsSettings>
</asmv3:application>
</assembly>

164
gfx/wrench/src/args.yaml Normal file
View File

@ -0,0 +1,164 @@
name: wrench
version: "0.1"
author: Vladimir Vukicevic <vladimir@pobox.com>
about: WebRender testing and debugging utility
args:
- precache:
short: c
long: precache
help: Precache shaders
- verbose:
short: v
long: verbose
help: Enable verbose display
- zoom:
short: z
long: zoom
help: Set zoom factor
takes_value: true
- debug:
short: d
long: debug
help: Enable debug renderer
- shaders:
long: shaders
help: Override path for shaders
takes_value: true
- rebuild:
short: r
long: rebuild
help: Rebuild display list from scratch every frame
- save:
long: save
help: 'Save frames, one of: yaml, json, ron, or binary'
takes_value: true
- no_subpixel_aa:
short: a
long: no-subpixel-aa
help: Disable subpixel aa
- slow_subpixel:
long: slow-subpixel
help: Disable dual source blending
- headless:
short: h
long: headless
help: Enable headless rendering
- dp_ratio:
short: p
long: device-pixel-ratio
help: Device pixel ratio
takes_value: true
- size:
short: s
long: size
help: Window size, specified as widthxheight (e.g. 1024x768), in pixels
takes_value: true
- time:
short: t
long: time
help: Time limit (in seconds)
takes_value: true
- vsync:
long: vsync
help: Enable vsync for OpenGL window
- no_scissor:
long: no-scissor
help: Disable scissors when clearing render targets
- no_batch:
long: no-batch
help: Disable batching of instanced draw calls
subcommands:
- png:
about: render frame described by YAML and save it to a png file
args:
- surface:
short: s
long: surface
help: 'What rendered surface to save as PNG, one of: screen, gpu-cache'
takes_value: true
- INPUT:
help: The input YAML file
required: true
index: 1
- show:
about: show frame(s) described by YAML
args:
- queue:
short: q
long: queue
help: How many frames to submit to WR ahead of time (default 1)
takes_value: true
- include:
long: include
help: Include the given element type. Can be specified multiple times. (rect/image/text/glyphs/border)
multiple: true
takes_value: true
- list-resources:
long: list-resources
help: List the resources used by this YAML file
- watch:
short: w
long: watch
help: Watch the given YAML file, reloading whenever it changes
- INPUT:
help: The input YAML file
required: true
index: 1
- replay:
about: replay binary recording
args:
- api:
long: api
help: Reissue Api messages for each frame
- skip-uploads:
long: skip-uploads
help: Skip re-uploads while reissuing Api messages (BROKEN)
- play:
long: play
help: Play entire recording through, then quit (useful with --save)
- INPUT:
help: The input binary file or directory
required: true
index: 1
- reftest:
about: run reftests
args:
- fuzz_tolerance:
long: fuzzy
takes_value: true
help: Add a minimum fuzziness tolerance to all tests.
required: false
- REFTEST:
help: a specific reftest or directory to run
required: false
index: 1
- rawtest:
about: run rawtests
- perf:
about: run benchmarks
args:
- filename:
help: name of the file to save benchmarks to
required: true
index: 1
- compare_perf:
about: compare two benchmark files
args:
- first_filename:
help: first benchmark file to compare
required: true
index: 1
- second_filename:
help: second benchmark file to compare
required: true
index: 2
- load:
about: load a capture
args:
- path:
help: directory containing the capture
takes_value: true
required: true
index: 1

View File

@ -0,0 +1,237 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use bincode::deserialize;
use byteorder::{LittleEndian, ReadBytesExt};
use clap;
use std::any::TypeId;
use std::fs::File;
use std::io::{Read, Seek, SeekFrom};
use std::path::{Path, PathBuf};
use std::{mem, process};
use webrender::WEBRENDER_RECORDING_HEADER;
use webrender::api::{ApiMsg, DocumentMsg};
use wrench::{Wrench, WrenchThing};
// One record from the binary recording stream: either a deserialized
// `ApiMsg`, or a raw payload buffer that accompanies a preceding message.
#[derive(Clone)]
enum Item {
    Message(ApiMsg),
    Data(Vec<u8>),
}
/// Replays a WebRender binary recording frame by frame (see `do_frame`).
pub struct BinaryFrameReader {
    // The open recording file; the header words are consumed by `new`.
    file: File,
    // True once the read cursor has reached the end of the file, i.e. all
    // frame offsets have been discovered.
    eof: bool,
    // Byte offset of the start of each frame found so far.
    frame_offsets: Vec<u64>,

    // Command-line toggles, populated by `new_from_args`.
    skip_uploads: bool,
    replay_api: bool,
    play_through: bool,

    // Messages/payloads of the currently loaded frame.
    frame_data: Vec<Item>,
    // Index of the current frame.
    frame_num: u32,
    // Whether `frame_data` currently holds `frame_num`'s contents.
    frame_read: bool,
}
impl BinaryFrameReader {
    /// Open a recording, validate its header, and warn if the recorded
    /// `ApiMsg` type id does not match this build's.
    pub fn new(file_path: &Path) -> BinaryFrameReader {
        let mut file = File::open(&file_path).expect("Can't open recording file");
        let header = file.read_u64::<LittleEndian>().unwrap();
        assert_eq!(
            header,
            WEBRENDER_RECORDING_HEADER,
            "Binary recording is missing recording header!"
        );

        // Best-effort version check: the recorder writes ApiMsg's TypeId.
        // SAFETY-ish caveat: TypeId's layout is unspecified; this relies on
        // it being exactly one u64 wide, which the size assertion verifies
        // at runtime before the transmute.
        let apimsg_type_id = unsafe {
            assert_eq!(mem::size_of::<TypeId>(), mem::size_of::<u64>());
            mem::transmute::<TypeId, u64>(TypeId::of::<ApiMsg>())
        };
        let written_apimsg_type_id = file.read_u64::<LittleEndian>().unwrap();
        if written_apimsg_type_id != apimsg_type_id {
            // Mismatch is only a warning: deserialization may still work if
            // the message layout happens to be compatible.
            println!(
                "Warning: binary file ApiMsg type mismatch: expected 0x{:x}, found 0x{:x}",
                apimsg_type_id,
                written_apimsg_type_id
            );
        }

        BinaryFrameReader {
            file,
            eof: false,
            frame_offsets: vec![],

            skip_uploads: false,
            replay_api: false,
            play_through: false,

            frame_data: vec![],
            frame_num: 0,
            frame_read: false,
        }
    }

    /// Build a reader from the `replay` subcommand's clap matches:
    /// INPUT (required path) plus the skip-uploads/api/play flags.
    pub fn new_from_args(args: &clap::ArgMatches) -> BinaryFrameReader {
        let bin_file = args.value_of("INPUT").map(|s| PathBuf::from(s)).unwrap();
        let mut r = BinaryFrameReader::new(&bin_file);
        r.skip_uploads = args.is_present("skip-uploads");
        r.replay_api = args.is_present("api");
        r.play_through = args.is_present("play");
        r
    }

    // FIXME I don't think we can skip uploads without also skipping
    // payload (I think? Unused payload ranges may also be ignored.). But
    // either way we'd need to track image updates and deletions -- if we
    // delete an image, we can't go back to a previous frame.
    //
    // We could probably introduce a mode where going backwards replays all
    // frames up until that frame, so that we know we can be accurate.
    fn should_skip_upload_msg(&self, msg: &ApiMsg) -> bool {
        if !self.skip_uploads {
            return false;
        }

        match *msg {
            ApiMsg::UpdateResources(..) => true,
            _ => false,
        }
    }

    // a frame exists if we either haven't hit eof yet, or if
    // we have, then if we've seen its offset.
    fn frame_exists(&self, frame: u32) -> bool {
        !self.eof || (frame as usize) < self.frame_offsets.len()
    }
}
impl WrenchThing for BinaryFrameReader {
    /// Load (if needed) and replay the current frame's messages through
    /// `wrench`, returning the current frame number.
    fn do_frame(&mut self, wrench: &mut Wrench) -> u32 {
        // save where the frame begins as we read through the file
        if self.frame_num as usize >= self.frame_offsets.len() {
            self.frame_num = self.frame_offsets.len() as u32;
            let pos = self.file.seek(SeekFrom::Current(0)).unwrap();
            println!("Frame {} offset: {}", self.frame_offsets.len(), pos);
            self.frame_offsets.push(pos);
        }

        let first_time = !self.frame_read;
        if first_time {
            // Seek to this frame's start and (re)parse its items.
            let offset = self.frame_offsets[self.frame_num as usize];
            self.file.seek(SeekFrom::Start(offset)).unwrap();

            wrench.set_title(&format!("frame {}", self.frame_num));

            self.frame_data.clear();
            let mut found_frame_marker = false;
            let mut found_display_list = false;
            let mut found_pipeline = false;
            // Records are length-prefixed (u32 LE). A non-zero length is a
            // serialized ApiMsg; a zero length appears to mark a payload
            // record whose real length follows — TODO confirm against the
            // recorder's format.
            while let Ok(mut len) = self.file.read_u32::<LittleEndian>() {
                if len > 0 {
                    let mut buffer = vec![0; len as usize];
                    self.file.read_exact(&mut buffer).unwrap();
                    let msg = deserialize(&buffer).unwrap();
                    let mut store_message = true;
                    // In order to detect the first valid frame, we
                    // need to find:
                    // (a) SetRootPipeline
                    // (b) SetDisplayList
                    // (c) GenerateFrame that occurs *after* (a) and (b)
                    match msg {
                        ApiMsg::UpdateDocument(_, ref doc_msgs) => {
                            for doc_msg in doc_msgs {
                                match *doc_msg {
                                    DocumentMsg::GenerateFrame => {
                                        found_frame_marker = true;
                                    }
                                    // Seeing either of these after a marker
                                    // invalidates it, enforcing the
                                    // "occurs after" ordering above.
                                    DocumentMsg::SetDisplayList { .. } => {
                                        found_frame_marker = false;
                                        found_display_list = true;
                                    }
                                    DocumentMsg::SetRootPipeline(..) => {
                                        found_frame_marker = false;
                                        found_pipeline = true;
                                    }
                                    _ => {}
                                }
                            }
                        }
                        // Wrench depends on the document always existing
                        ApiMsg::DeleteDocument(_) => {
                            store_message = false;
                        }
                        _ => {}
                    }
                    if store_message {
                        self.frame_data.push(Item::Message(msg));
                    }
                    // Frames are marked by the GenerateFrame message.
                    // On the first frame, we additionally need to find at least
                    // a SetDisplayList and a SetRootPipeline.
                    // After the first frame, any GenerateFrame message marks a new
                    // frame being rendered.
                    if found_frame_marker && (self.frame_num > 0 || (found_display_list && found_pipeline)) {
                        break;
                    }
                } else {
                    // Zero-length record: next u32 is the payload size.
                    len = self.file.read_u32::<LittleEndian>().unwrap();
                    let mut buffer = vec![0; len as usize];
                    self.file.read_exact(&mut buffer).unwrap();
                    self.frame_data.push(Item::Data(buffer));
                }
            }

            // If the cursor reached the file's end, all offsets are known.
            if self.eof == false &&
                self.file.seek(SeekFrom::Current(0)).unwrap() == self.file.metadata().unwrap().len()
            {
                self.eof = true;
            }

            self.frame_read = true;
        }

        if first_time || self.replay_api {
            // Re-send every stored message/payload to WebRender.
            wrench.begin_frame();
            let frame_items = self.frame_data.clone();
            for item in frame_items {
                match item {
                    Item::Message(msg) => if !self.should_skip_upload_msg(&msg) {
                        wrench.api.send_message(msg);
                    },
                    Item::Data(buf) => {
                        wrench.api.send_payload(&buf);
                    }
                }
            }
        } else if self.play_through {
            // Advance automatically; exit the process once the recording
            // runs out of frames.
            if !self.frame_exists(self.frame_num + 1) {
                process::exit(0);
            }
            self.next_frame();
            self.do_frame(wrench);
        } else {
            wrench.refresh();
        }

        self.frame_num
    }

    // note that we don't loop here; we could, but that'll require
    // some tracking to avoid reuploading resources every run. We
    // sort of try in should_skip_upload_msg, but this needs work.
    fn next_frame(&mut self) {
        if self.frame_exists(self.frame_num + 1) {
            self.frame_num += 1;
            self.frame_read = false;
        }
    }

    fn prev_frame(&mut self) {
        if self.frame_num > 0 {
            self.frame_num -= 1;
            self.frame_read = false;
        }
    }
}

162
gfx/wrench/src/blob.rs Normal file
View File

@ -0,0 +1,162 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
// A very basic BlobImageRenderer that can only render a checkerboard pattern.
use std::collections::HashMap;
use std::sync::Arc;
use std::sync::Mutex;
use webrender::api::*;
// Serialize/deserialze the blob.
pub fn serialize_blob(color: ColorU) -> Vec<u8> {
vec![color.r, color.g, color.b, color.a]
}
/// Decode blob bytes back into a color. Four or more bytes are read as
/// r, g, b, a (extra bytes ignored); a single byte is replicated into all
/// four channels; any other length is an error.
fn deserialize_blob(blob: &[u8]) -> Result<ColorU, ()> {
    if blob.len() >= 4 {
        Ok(ColorU::new(blob[0], blob[1], blob[2], blob[3]))
    } else if blob.len() == 1 {
        let a = blob[0];
        Ok(ColorU::new(a, a, a, a))
    } else {
        Err(())
    }
}
// This is the function that applies the deserialized drawing commands and generates
// actual image data.
// This is the function that applies the deserialized drawing commands (a
// single color) and generates actual image data: a checkerboard in the
// requested format, offset by the tile position for tiled blob images.
fn render_blob(
    color: ColorU,
    descriptor: &BlobImageDescriptor,
    tile: Option<TileOffset>,
) -> BlobImageResult {
    // Allocate storage for the result. Right now the resource cache expects the
    // tiles to have no stride or offset. (Capacity assumes 4 bytes per pixel;
    // R8 output simply uses a quarter of it.)
    let mut texels = Vec::with_capacity((descriptor.width * descriptor.height * 4) as usize);

    // Generate a per-tile pattern to see it in the demo. For a real use case it would not
    // make sense for the rendered content to depend on its tile.
    let tile_checker = match tile {
        Some(tile) => (tile.x % 2 == 0) != (tile.y % 2 == 0),
        None => true,
    };

    for y in 0 .. descriptor.height {
        for x in 0 .. descriptor.width {
            // Apply the tile's offset. This is important: all drawing commands should be
            // translated by this offset to give correct results with tiled blob images.
            let x2 = x + descriptor.offset.x as u32;
            let y2 = y + descriptor.offset.y as u32;

            // Render a simple checkerboard pattern
            let checker = if (x2 % 20 >= 10) != (y2 % 20 >= 10) {
                1
            } else {
                0
            };
            // ..nested in the per-tile checkerboard pattern
            let tc = if tile_checker { 0 } else { (1 - checker) * 40 };

            match descriptor.format {
                ImageFormat::BGRA8 => {
                    texels.push(color.b * checker + tc);
                    texels.push(color.g * checker + tc);
                    texels.push(color.r * checker + tc);
                    texels.push(color.a * checker + tc);
                }
                ImageFormat::R8 => {
                    texels.push(color.a * checker + tc);
                }
                _ => {
                    // Anything other than BGRA8/R8 is not implemented here.
                    return Err(BlobImageError::Other(
                        format!("Unsupported image format {:?}", descriptor.format),
                    ));
                }
            }
        }
    }

    Ok(RasterizedBlobImage {
        data: texels,
        width: descriptor.width,
        height: descriptor.height,
    })
}
/// Hooks invoked by `CheckerboardRenderer` so the test harness can observe
/// when blob requests are issued and resolved.
pub struct BlobCallbacks {
    // Called with each incoming request from `request()`.
    pub request: Box<Fn(&BlobImageRequest) + Send + 'static>,
    // Called (with no arguments) each time `resolve()` runs.
    pub resolve: Box<Fn() + Send + 'static>,
}
impl BlobCallbacks {
    /// Construct callbacks that do nothing; tests replace them as needed.
    pub fn new() -> Self {
        BlobCallbacks {
            request: Box::new(|_| ()),
            resolve: Box::new(|| ()),
        }
    }
}
/// A minimal `BlobImageRenderer` that rasterizes every blob image as a
/// checkerboard (see `render_blob`).
pub struct CheckerboardRenderer {
    // Deserialized "commands" (just a color) per image key.
    image_cmds: HashMap<ImageKey, ColorU>,
    // Shared observation hooks, also held by the test harness.
    callbacks: Arc<Mutex<BlobCallbacks>>,

    // The images rendered in the current frame (not kept here between frames).
    rendered_images: HashMap<BlobImageRequest, BlobImageResult>,
}
impl CheckerboardRenderer {
    /// Create a renderer with no registered images, sharing `callbacks`
    /// with the caller.
    pub fn new(callbacks: Arc<Mutex<BlobCallbacks>>) -> Self {
        CheckerboardRenderer {
            image_cmds: HashMap::new(),
            rendered_images: HashMap::new(),
            callbacks,
        }
    }
}
impl BlobImageRenderer for CheckerboardRenderer {
    /// Register a new blob image; `cmds` is the serialized color.
    /// The tiling hint is ignored by this simple renderer.
    fn add(&mut self, key: ImageKey, cmds: BlobImageData, _: Option<TileSize>) {
        self.image_cmds
            .insert(key, deserialize_blob(&cmds[..]).unwrap());
    }

    fn update(&mut self, key: ImageKey, cmds: BlobImageData, _dirty_rect: Option<DeviceUintRect>) {
        // Here, updating is just replacing the current version of the commands with
        // the new one (no incremental updates).
        self.image_cmds
            .insert(key, deserialize_blob(&cmds[..]).unwrap());
    }

    fn delete(&mut self, key: ImageKey) {
        self.image_cmds.remove(&key);
    }

    fn request(
        &mut self,
        _resources: &BlobImageResources,
        request: BlobImageRequest,
        descriptor: &BlobImageDescriptor,
        _dirty_rect: Option<DeviceUintRect>,
    ) {
        // Notify the test harness that a request came in.
        (self.callbacks.lock().unwrap().request)(&request);
        assert!(!self.rendered_images.contains_key(&request));
        // This method is where we kick off our rendering jobs.
        // It should avoid doing work on the calling thread as much as possible.
        // In this example we will use the thread pool to render individual tiles.

        // Gather the input data to send to a worker thread.
        let cmds = self.image_cmds.get(&request.key).unwrap();

        // NOTE(review): despite the comment above, this implementation
        // renders synchronously on the calling thread.
        let result = render_blob(*cmds, descriptor, request.tile);

        self.rendered_images.insert(request, result);
    }

    fn resolve(&mut self, request: BlobImageRequest) -> BlobImageResult {
        // Notify the harness, then hand back (and forget) the result
        // computed in `request`.
        (self.callbacks.lock().unwrap().resolve)();
        self.rendered_images.remove(&request).unwrap()
    }

    // Fonts are not used by this renderer.
    fn delete_font(&mut self, _key: FontKey) {}

    fn delete_font_instance(&mut self, _key: FontInstanceKey) {}
}

View File

@ -0,0 +1,124 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use byteorder::{BigEndian, ByteOrder, ReadBytesExt, WriteBytesExt};
use core_foundation::data::CFData;
use core_graphics::font::CGFont;
use std;
use std::io::Cursor;
use std::io::Read;
use std::io::Write;
/// Compute an sfnt table checksum: the wrapping sum of the data interpreted
/// as big-endian u32 words, with the table zero-padded to a word boundary.
// NOTE(review): assumes a failed read_u32 consumes no partial word — true
// for the in-memory slice readers used in this file; verify before using
// with other reader types.
fn calc_table_checksum<D: Read>(mut data: D) -> u32 {
    let mut sum: u32 = 0;

    // Sum whole 32-bit words while any remain.
    while let Ok(x) = data.read_u32::<BigEndian>() {
        sum = sum.wrapping_add(x);
    }

    // read the remaining bytes (0..=3 of them) into a zero-initialized
    // buffer, which implements the spec's zero-padding of the final word
    let mut buf: [u8; 4] = [0; 4];
    data.read(&mut buf).unwrap();

    // if there was nothing left in buf we'll just read a 0
    // which won't affect the checksum
    sum = sum.wrapping_add(BigEndian::read_u32(&buf));
    sum
}
/// Return the largest `shift` such that `1 << shift` is strictly less than
/// `a` (0 when no power of two is below `a`). Used to derive the sfnt
/// binary-search header fields from the table count.
fn max_pow_2_less_than(a: u16) -> u16 {
    let mut shift: u16 = 0;
    // Invariant: power == 1 << (shift + 1).
    let mut power: u16 = 2;
    while power < a {
        shift += 1;
        power <<= 1;
    }
    shift
}
/// Metadata for one font table, gathered before the sfnt file is assembled.
struct TableRecord {
    tag: u32,      // four-character table tag packed big-endian into a u32
    checksum: u32, // checksum of the table data (see calc_table_checksum)
    offset: u32,   // byte offset of the table from the start of the font
    length: u32,   // unpadded table length in bytes
    data: CFData   // raw table bytes copied out of the CGFont
}

const CFF_TAG: u32 = 0x43464620; // 'CFF ' — presence means CFF (OpenType) outlines
const HEAD_TAG: u32 = 0x68656164; // 'head'
const OTTO_TAG: u32 = 0x4f54544f; // 'OTTO' — sfnt version tag for CFF fonts
const TRUE_TAG: u32 = 0x00010000; // sfnt version tag for TrueType outlines
/// Reconstruct a raw sfnt (TTF/OTF) font file from the tables exposed by a
/// `CGFont`, so the bytes can be handed to code expecting a font file.
///
/// Layout follows the sfnt container format: sfnt version, table directory
/// (count plus binary-search helper fields), one 16-byte record per table,
/// then the 32-bit-aligned table data, with the `head` table's
/// `checkSumAdjustment` patched in at the end.
pub fn font_to_data(font: CGFont) -> Result<Vec<u8>, std::io::Error> {
    // We'll reconstruct a TTF font from the tables we can get from the CGFont
    let mut cff = false;
    let tags = font.copy_table_tags();
    let count = tags.len() as u16;

    let mut records = Vec::new();
    // Table data starts after the 12-byte offset table and the 16-byte
    // directory record for each table.
    let mut offset: u32 = 0;
    offset += 4 * 3;
    offset += 4 * 4 * (count as u32);
    for tag in tags.iter() {
        if tag == CFF_TAG {
            cff = true;
        }
        let data = font.copy_table_for_tag(tag).unwrap();
        let length = data.len() as u32;
        let checksum;
        if tag == HEAD_TAG {
            // The head checksum must be computed with the 4-byte
            // checkSumAdjustment field (bytes 8..12) treated as zero, so
            // sum the bytes on either side of it.
            checksum = calc_table_checksum(&data.bytes()[0 .. 8])
                .wrapping_add(calc_table_checksum(&data.bytes()[12 ..]))
        } else {
            checksum = calc_table_checksum(data.bytes());
        }
        records.push(TableRecord { tag, offset, data, length, checksum } );
        offset += length;
        // 32 bit align the tables
        offset = (offset + 3) & !3;
    }

    let mut buf = Vec::new();
    // sfnt version: 'OTTO' for CFF outlines, 0x00010000 for TrueType.
    if cff {
        buf.write_u32::<BigEndian>(OTTO_TAG)?;
    } else {
        buf.write_u32::<BigEndian>(TRUE_TAG)?;
    }
    // numTables, searchRange, entrySelector, rangeShift — the standard
    // binary-search helpers derived from the largest power of two <= count.
    buf.write_u16::<BigEndian>(count)?;
    buf.write_u16::<BigEndian>((1 << max_pow_2_less_than(count)) * 16)?;
    buf.write_u16::<BigEndian>(max_pow_2_less_than(count))?;
    buf.write_u16::<BigEndian>(count * 16 - ((1 << max_pow_2_less_than(count)) * 16))?;

    // write table record entries
    for r in &records {
        buf.write_u32::<BigEndian>(r.tag)?;
        buf.write_u32::<BigEndian>(r.checksum)?;
        buf.write_u32::<BigEndian>(r.offset)?;
        buf.write_u32::<BigEndian>(r.length)?;
    }

    // write tables
    let mut check_sum_adjustment_offset = 0;
    for r in &records {
        if r.tag == HEAD_TAG {
            // Remember where head's checkSumAdjustment lands in the file:
            // 8 bytes (version + fontRevision) into the head table data.
            check_sum_adjustment_offset = buf.len() + 2 * 4;
        }
        // write_all (rather than write) guarantees the whole table is
        // emitted; Vec's Write impl never short-writes, but be explicit.
        buf.write_all(r.data.bytes())?;
        // 32 bit align the tables
        while buf.len() & 3 != 0 {
            buf.push(0);
        }
    }

    let mut c = Cursor::new(buf);
    c.set_position(check_sum_adjustment_offset as u64);
    // clear the checksumAdjust field before checksumming the whole font
    c.write_u32::<BigEndian>(0)?;
    // checkSumAdjustment = 0xB1B0AFBA minus the checksum of the whole font.
    let sum = 0xb1b0afba_u32.wrapping_sub(calc_table_checksum(&c.get_mut()[..]));
    // set checkSumAdjust to the computed checksum
    c.set_position(check_sum_adjustment_offset as u64);
    c.write_u32::<BigEndian>(sum)?;

    Ok(c.into_inner())
}

View File

@ -0,0 +1,317 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
// the json code is largely unfinished; allow these to silence a bunch of warnings
#![allow(unused_variables)]
#![allow(dead_code)]
use app_units::Au;
use image::{save_buffer, ColorType};
use premultiply::unpremultiply;
use serde_json;
use std::collections::HashMap;
use std::io::Write;
use std::path::{Path, PathBuf};
use std::{fmt, fs};
use super::CURRENT_FRAME_NUMBER;
use time;
use webrender;
use webrender::api::*;
use webrender::api::channel::Payload;
// A font captured from resource updates: either a platform-native handle,
// or raw bytes with a face index and an optional path once dumped to disk.
enum CachedFont {
    Native(NativeFontHandle),
    Raw(Option<Vec<u8>>, u32, Option<PathBuf>),
}

// Font-instance data retained for serializing glyph information later.
struct CachedFontInstance {
    font_key: FontKey,
    glyph_size: Au,
}

// An image captured from resource updates. `bytes` holds raw pixels until
// the image is written out, at which point `path` records where it went.
struct CachedImage {
    width: u32,
    height: u32,
    stride: u32,
    format: ImageFormat,
    bytes: Option<Vec<u8>>,
    path: Option<PathBuf>,
}

/// Records WebRender API messages and writes each display list out as a
/// pretty-printed JSON file, with image resources saved under `res/`.
pub struct JsonFrameWriter {
    frame_base: PathBuf,  // directory receiving frame-N.json files
    rsrc_base: PathBuf,   // <frame_base>/res, for dumped image files
    rsrc_prefix: String,  // timestamp prefix used in resource file names
    next_rsrc_num: u32,   // monotonically increasing resource counter
    images: HashMap<ImageKey, CachedImage>,
    fonts: HashMap<FontKey, CachedFont>,
    font_instances: HashMap<FontInstanceKey, CachedFontInstance>,

    last_frame_written: u32, // guards against writing the same frame twice

    // Set by begin_write_display_list, consumed by finish_write_display_list.
    dl_descriptor: Option<BuiltDisplayListDescriptor>,
}
impl JsonFrameWriter {
    /// Create a writer rooted at `path`, creating `path/res` for resources.
    pub fn new(path: &Path) -> Self {
        let mut rsrc_base = path.to_owned();
        rsrc_base.push("res");
        // Best-effort; failures surface later when files are written.
        fs::create_dir_all(&rsrc_base).ok();

        // Prefix resource names with the current unix time so reruns into
        // the same directory don't collide.
        let rsrc_prefix = format!("{}", time::get_time().sec);

        JsonFrameWriter {
            frame_base: path.to_owned(),
            rsrc_base,
            rsrc_prefix,
            next_rsrc_num: 1,
            images: HashMap::new(),
            fonts: HashMap::new(),
            font_instances: HashMap::new(),
            dl_descriptor: None,
            last_frame_written: u32::max_value(),
        }
    }

    /// Record the descriptor of an incoming display list; the payload
    /// arrives later via `finish_write_display_list`.
    pub fn begin_write_display_list(
        &mut self,
        _: &Epoch,
        _: &PipelineId,
        _: &Option<ColorF>,
        _: &LayoutSize,
        display_list: &BuiltDisplayListDescriptor,
    ) {
        // Only write each frame once, keyed by the global frame counter.
        unsafe {
            if CURRENT_FRAME_NUMBER == self.last_frame_written {
                return;
            }
            self.last_frame_written = CURRENT_FRAME_NUMBER;
        }

        self.dl_descriptor = Some(display_list.clone());
    }

    /// Combine the stored descriptor with the payload bytes and write the
    /// rebuilt display list to frame-<N>.json.
    pub fn finish_write_display_list(&mut self, frame: u32, data: &[u8]) {
        let payload = Payload::from_data(data);
        let dl_desc = self.dl_descriptor.take().unwrap();

        let dl = BuiltDisplayList::from_data(payload.display_list_data, dl_desc);

        let mut frame_file_name = self.frame_base.clone();
        let current_shown_frame = unsafe { CURRENT_FRAME_NUMBER };
        frame_file_name.push(format!("frame-{}.json", current_shown_frame));

        let mut file = fs::File::create(&frame_file_name).unwrap();

        let s = serde_json::to_string_pretty(&dl).unwrap();
        file.write_all(&s.into_bytes()).unwrap();
        file.write_all(b"\n").unwrap();
    }

    /// Mirror resource updates (images, fonts, font instances) into the
    /// local caches so they can be referenced when frames are written.
    fn update_resources(&mut self, updates: &ResourceUpdates) {
        for update in &updates.updates {
            match *update {
                ResourceUpdate::AddImage(ref img) => {
                    let stride = img.descriptor.stride.unwrap_or(
                        img.descriptor.width * img.descriptor.format.bytes_per_pixel(),
                    );
                    let bytes = match img.data {
                        ImageData::Raw(ref v) => (**v).clone(),
                        // NOTE(review): this `return` aborts processing of
                        // ALL remaining updates in the batch, not just this
                        // image — confirm that is intended.
                        ImageData::External(_) | ImageData::Blob(_) => {
                            return;
                        }
                    };
                    self.images.insert(
                        img.key,
                        CachedImage {
                            width: img.descriptor.width,
                            height: img.descriptor.height,
                            stride,
                            format: img.descriptor.format,
                            bytes: Some(bytes),
                            path: None,
                        },
                    );
                }
                ResourceUpdate::UpdateImage(ref img) => {
                    if let Some(ref mut data) = self.images.get_mut(&img.key) {
                        assert_eq!(data.width, img.descriptor.width);
                        assert_eq!(data.height, img.descriptor.height);
                        assert_eq!(data.format, img.descriptor.format);

                        if let ImageData::Raw(ref bytes) = img.data {
                            // Invalidate any previously dumped file.
                            data.path = None;
                            data.bytes = Some((**bytes).clone());
                        } else {
                            // Other existing image types only make sense within the gecko integration.
                            println!(
                                "Wrench only supports updating buffer images ({}).",
                                "ignoring update commands"
                            );
                        }
                    }
                }
                ResourceUpdate::DeleteImage(img) => {
                    self.images.remove(&img);
                }
                ResourceUpdate::AddFont(ref font) => match font {
                    &AddFont::Raw(key, ref bytes, index) => {
                        self.fonts
                            .insert(key, CachedFont::Raw(Some(bytes.clone()), index, None));
                    }
                    &AddFont::Native(key, ref handle) => {
                        self.fonts.insert(key, CachedFont::Native(handle.clone()));
                    }
                },
                ResourceUpdate::DeleteFont(_) => {}
                ResourceUpdate::AddFontInstance(ref instance) => {
                    self.font_instances.insert(
                        instance.key,
                        CachedFontInstance {
                            font_key: instance.font_key,
                            glyph_size: instance.glyph_size,
                        },
                    );
                }
                ResourceUpdate::DeleteFontInstance(_) => {}
            }
        }
    }

    /// Produce the next resource file name as a pair of
    /// (absolute path for writing, relative path for referencing),
    /// advancing `counter`.
    fn next_rsrc_paths(
        prefix: &str,
        counter: &mut u32,
        base_path: &Path,
        base: &str,
        ext: &str,
    ) -> (PathBuf, PathBuf) {
        let mut path_file = base_path.to_owned();
        let mut path = PathBuf::from("res");

        let fstr = format!("{}-{}-{}.{}", prefix, base, counter, ext);
        path_file.push(&fstr);
        path.push(&fstr);

        *counter += 1;

        (path_file, path)
    }

    /// Return the on-disk path for an image, writing it out as a PNG on
    /// first use. Returns None for unknown keys or unwritable formats.
    fn path_for_image(&mut self, key: ImageKey) -> Option<PathBuf> {
        if let Some(ref mut data) = self.images.get_mut(&key) {
            if data.path.is_some() {
                return data.path.clone();
            }
        } else {
            return None;
        };

        // Remove the data to munge it
        let mut data = self.images.remove(&key).unwrap();
        let mut bytes = data.bytes.take().unwrap();
        let (path_file, path) = Self::next_rsrc_paths(
            &self.rsrc_prefix,
            &mut self.next_rsrc_num,
            &self.rsrc_base,
            "img",
            "png",
        );

        // Only contiguous (stride == width * bpp) BGRA8/R8 images can be
        // saved; everything else is reported below.
        let ok = match data.format {
            ImageFormat::BGRA8 => if data.stride == data.width * 4 {
                unpremultiply(bytes.as_mut_slice());
                save_buffer(
                    &path_file,
                    &bytes,
                    data.width,
                    data.height,
                    ColorType::RGBA(8),
                ).unwrap();
                true
            } else {
                false
            },
            ImageFormat::R8 => if data.stride == data.width {
                save_buffer(
                    &path_file,
                    &bytes,
                    data.width,
                    data.height,
                    ColorType::Gray(8),
                ).unwrap();
                true
            } else {
                false
            },
            _ => false,
        };

        if !ok {
            println!(
                "Failed to write image with format {:?}, dimensions {}x{}, stride {}",
                data.format,
                data.width,
                data.height,
                data.stride
            );
            return None;
        }

        data.path = Some(path.clone());
        // put it back
        self.images.insert(key, data);
        Some(path)
    }
}
/// Opaque Debug representation — the writer's internal caches are not
/// useful to print.
impl fmt::Debug for JsonFrameWriter {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        f.write_str("JsonFrameWriter")
    }
}
impl webrender::ApiRecordingReceiver for JsonFrameWriter {
    /// Intercept recorded API messages: mirror resource updates and note
    /// display-list descriptors so the payload can be written later.
    fn write_msg(&mut self, _: u32, msg: &ApiMsg) {
        match *msg {
            ApiMsg::UpdateResources(ref updates) => self.update_resources(updates),
            ApiMsg::UpdateDocument(_, ref doc_msgs) => {
                for doc_msg in doc_msgs {
                    match *doc_msg {
                        DocumentMsg::UpdateResources(ref resources) => {
                            self.update_resources(resources);
                        }
                        DocumentMsg::SetDisplayList {
                            ref epoch,
                            ref pipeline_id,
                            ref background,
                            ref viewport_size,
                            ref list_descriptor,
                            ..
                        } => {
                            self.begin_write_display_list(
                                epoch,
                                pipeline_id,
                                background,
                                viewport_size,
                                list_descriptor,
                            );
                        }
                        _ => {}
                    }
                }
            }
            ApiMsg::CloneApi(..) => {}
            _ => {}
        }
    }

    /// The display-list payload arrives separately from its message; only
    /// act if write_msg saw a SetDisplayList (dl_descriptor is set).
    fn write_payload(&mut self, frame: u32, data: &[u8]) {
        if self.dl_descriptor.is_some() {
            self.finish_write_display_list(frame, data);
        }
    }
}

597
gfx/wrench/src/main.rs Normal file
View File

@ -0,0 +1,597 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
extern crate app_units;
extern crate base64;
extern crate bincode;
extern crate byteorder;
#[macro_use]
extern crate clap;
#[cfg(target_os = "macos")]
extern crate core_foundation;
#[cfg(target_os = "macos")]
extern crate core_graphics;
extern crate crossbeam;
#[cfg(target_os = "windows")]
extern crate dwrote;
#[cfg(feature = "logging")]
extern crate env_logger;
extern crate euclid;
#[cfg(any(target_os = "linux", target_os = "macos"))]
extern crate font_loader;
extern crate gleam;
extern crate glutin;
extern crate image;
#[macro_use]
extern crate lazy_static;
#[macro_use]
extern crate log;
#[cfg(feature = "headless")]
extern crate osmesa_sys;
extern crate ron;
#[macro_use]
extern crate serde;
extern crate serde_json;
extern crate time;
extern crate webrender;
extern crate yaml_rust;
mod binary_frame_reader;
mod blob;
mod json_frame_writer;
mod parse_function;
mod perf;
mod png;
mod premultiply;
mod rawtest;
mod reftest;
mod ron_frame_writer;
mod scene;
mod wrench;
mod yaml_frame_reader;
mod yaml_frame_writer;
mod yaml_helper;
#[cfg(target_os = "macos")]
mod cgfont_to_data;
use binary_frame_reader::BinaryFrameReader;
use gleam::gl;
use glutin::{ElementState, VirtualKeyCode, WindowProxy};
use perf::PerfHarness;
use png::save_flipped;
use rawtest::RawtestHarness;
use reftest::{ReftestHarness, ReftestOptions};
#[cfg(feature = "headless")]
use std::ffi::CString;
#[cfg(feature = "headless")]
use std::mem;
use std::os::raw::c_void;
use std::path::{Path, PathBuf};
use std::ptr;
use std::rc::Rc;
use std::sync::mpsc::{channel, Sender, Receiver};
use webrender::DebugFlags;
use webrender::api::*;
use wrench::{Wrench, WrenchThing};
use yaml_frame_reader::YamlFrameReader;
// Global defaults shared across wrench modules.
lazy_static! {
    static ref PLATFORM_DEFAULT_FACE_NAME: String = String::from("Arial");
    static ref WHITE_COLOR: ColorF = ColorF::new(1.0, 1.0, 1.0, 1.0);
    static ref BLACK_COLOR: ColorF = ColorF::new(0.0, 0.0, 0.0, 1.0);
}

// Frame counter read by the frame writers via `unsafe`.
// NOTE(review): presumably only mutated from the main thread — confirm
// before touching from elsewhere.
pub static mut CURRENT_FRAME_NUMBER: u32 = 0;
// GL context for headless rendering via OSMesa; pixels are rendered into
// `_buffer` instead of a window.
#[cfg(feature = "headless")]
pub struct HeadlessContext {
    width: u32,
    height: u32,
    // Held only to keep the context and its backing store alive.
    _context: osmesa_sys::OSMesaContext,
    _buffer: Vec<u32>,
}

// Stub used when the `headless` feature is disabled; only records the size.
#[cfg(not(feature = "headless"))]
pub struct HeadlessContext {
    width: u32,
    height: u32,
}
impl HeadlessContext {
    /// Create an OSMesa GL 3.3 core-profile context rendering into an
    /// in-memory buffer, and make it current.
    #[cfg(feature = "headless")]
    fn new(width: u32, height: u32) -> HeadlessContext {
        // Zero-terminated attribute list for OSMesaCreateContextAttribs.
        let mut attribs = Vec::new();
        attribs.push(osmesa_sys::OSMESA_PROFILE);
        attribs.push(osmesa_sys::OSMESA_CORE_PROFILE);
        attribs.push(osmesa_sys::OSMESA_CONTEXT_MAJOR_VERSION);
        attribs.push(3);
        attribs.push(osmesa_sys::OSMESA_CONTEXT_MINOR_VERSION);
        attribs.push(3);
        attribs.push(osmesa_sys::OSMESA_DEPTH_BITS);
        attribs.push(24);
        attribs.push(0);

        // SAFETY: attribs is zero-terminated as the API requires; a null
        // return (creation failure) is caught by the assertion below.
        let context =
            unsafe { osmesa_sys::OSMesaCreateContextAttribs(attribs.as_ptr(), ptr::null_mut()) };

        assert!(!context.is_null());

        // One u32 per pixel; the buffer must outlive the context, which the
        // struct guarantees by owning both.
        let mut buffer = vec![0; (width * height) as usize];

        unsafe {
            let ret = osmesa_sys::OSMesaMakeCurrent(
                context,
                buffer.as_mut_ptr() as *mut _,
                gl::UNSIGNED_BYTE,
                width as i32,
                height as i32,
            );
            assert!(ret != 0);
        };

        HeadlessContext {
            width,
            height,
            _context: context,
            _buffer: buffer,
        }
    }

    /// Non-headless builds: just record the requested size.
    #[cfg(not(feature = "headless"))]
    fn new(width: u32, height: u32) -> HeadlessContext {
        HeadlessContext { width, height }
    }

    /// Resolve a GL symbol through OSMesa.
    #[cfg(feature = "headless")]
    fn get_proc_address(s: &str) -> *const c_void {
        let c_str = CString::new(s).expect("Unable to create CString");
        unsafe { mem::transmute(osmesa_sys::OSMesaGetProcAddress(c_str.as_ptr())) }
    }

    // Without the headless feature there is nothing to resolve against.
    #[cfg(not(feature = "headless"))]
    fn get_proc_address(_: &str) -> *const c_void {
        ptr::null() as *const _
    }
}
// Either a real glutin window or a headless context, paired with the GL
// function table loaded for it.
pub enum WindowWrapper {
    Window(glutin::Window, Rc<gl::Gl>),
    Headless(HeadlessContext, Rc<gl::Gl>),
}

// NOTE(review): appears unused in this file; name looks like a typo of
// "Iterator".
pub struct HeadlessEventIterater;
impl WindowWrapper {
    // Present the frame; a no-op for headless contexts, which render into
    // memory.
    fn swap_buffers(&self) {
        match *self {
            WindowWrapper::Window(ref window, _) => window.swap_buffers().unwrap(),
            WindowWrapper::Headless(..) => {}
        }
    }

    // Current drawable size in device pixels.
    fn get_inner_size(&self) -> DeviceUintSize {
        let (w, h) = match *self {
            WindowWrapper::Window(ref window, _) => window.get_inner_size_pixels().unwrap(),
            WindowWrapper::Headless(ref context, _) => (context.width, context.height),
        };
        DeviceUintSize::new(w, h)
    }

    // Headless contexts always report a 1.0 device-pixel ratio.
    fn hidpi_factor(&self) -> f32 {
        match *self {
            WindowWrapper::Window(ref window, _) => window.hidpi_factor(),
            WindowWrapper::Headless(..) => 1.0,
        }
    }

    fn resize(&mut self, size: DeviceUintSize) {
        match *self {
            WindowWrapper::Window(ref mut window, _) => window.set_inner_size(size.width, size.height),
            WindowWrapper::Headless(_, _) => unimplemented!(), // requires Glutin update
        }
    }

    // Proxy used to wake the event loop from other threads; headless mode
    // has no event loop.
    fn create_window_proxy(&mut self) -> Option<WindowProxy> {
        match *self {
            WindowWrapper::Window(ref window, _) => Some(window.create_window_proxy()),
            WindowWrapper::Headless(..) => None,
        }
    }

    fn set_title(&mut self, title: &str) {
        match *self {
            WindowWrapper::Window(ref window, _) => window.set_title(title),
            WindowWrapper::Headless(..) => (),
        }
    }

    // Borrow the GL function table.
    pub fn gl(&self) -> &gl::Gl {
        match *self {
            WindowWrapper::Window(_, ref gl) | WindowWrapper::Headless(_, ref gl) => &**gl,
        }
    }

    // Clone the shared handle to the GL function table.
    pub fn clone_gl(&self) -> Rc<gl::Gl> {
        match *self {
            WindowWrapper::Window(_, ref gl) | WindowWrapper::Headless(_, ref gl) => gl.clone(),
        }
    }
}
/// Create either a headless OSMesa context or a glutin window of the given
/// device-pixel size, load the matching GL function table, set an initial
/// clear color, and print the GL version / hidpi info.
fn make_window(
    size: DeviceUintSize,
    dp_ratio: Option<f32>,
    vsync: bool,
    headless: bool,
) -> WindowWrapper {
    let wrapper = if headless {
        // Headless: resolve GL symbols through OSMesa.
        let gl = match gl::GlType::default() {
            gl::GlType::Gl => unsafe {
                gl::GlFns::load_with(|symbol| {
                    HeadlessContext::get_proc_address(symbol) as *const _
                })
            },
            gl::GlType::Gles => unsafe {
                gl::GlesFns::load_with(|symbol| {
                    HeadlessContext::get_proc_address(symbol) as *const _
                })
            },
        };
        WindowWrapper::Headless(HeadlessContext::new(size.width, size.height), gl)
    } else {
        // Windowed: request GL 3.2 with a GLES 3.0 fallback.
        let mut builder = glutin::WindowBuilder::new()
            .with_gl(glutin::GlRequest::GlThenGles {
                opengl_version: (3, 2),
                opengles_version: (3, 0),
            })
            .with_dimensions(size.width, size.height);
        builder.opengl.vsync = vsync;
        let window = builder.build().unwrap();
        unsafe {
            window
                .make_current()
                .expect("unable to make context current!");
        }

        // Resolve GL symbols through the window's context.
        let gl = match window.get_api() {
            glutin::Api::OpenGl => unsafe {
                gl::GlFns::load_with(|symbol| window.get_proc_address(symbol) as *const _)
            },
            glutin::Api::OpenGlEs => unsafe {
                gl::GlesFns::load_with(|symbol| window.get_proc_address(symbol) as *const _)
            },
            glutin::Api::WebGl => unimplemented!(),
        };

        WindowWrapper::Window(window, gl)
    };

    // Dark red clear color, so unrendered areas stand out.
    wrapper.gl().clear_color(0.3, 0.0, 0.0, 1.0);

    let gl_version = wrapper.gl().get_string(gl::VERSION);
    let gl_renderer = wrapper.gl().get_string(gl::RENDERER);

    // Used only for the printout below; callers query hidpi themselves.
    let dp_ratio = dp_ratio.unwrap_or(wrapper.hidpi_factor());
    println!("OpenGL version {}, {}", gl_version, gl_renderer);
    println!(
        "hidpi factor: {} (native {})",
        dp_ratio,
        wrapper.hidpi_factor()
    );

    wrapper
}
/// Sends a unit message on `tx` each time the renderer finishes a frame,
/// letting the main thread block until rendering completes.
struct Notifier {
    tx: Sender<()>,
}
// Frame-completion notifier: forwards renderer wake-ups onto a channel so
// callers can block until a frame has finished.
impl RenderNotifier for Notifier {
    fn clone(&self) -> Box<RenderNotifier> {
        let cloned = Notifier {
            tx: self.tx.clone(),
        };
        Box::new(cloned)
    }
    fn wake_up(&self) {
        self.tx.send(()).unwrap();
    }
    fn new_document_ready(&self, _: DocumentId, scrolled: bool, _composite_needed: bool) {
        // Scroll-only updates do not produce a new frame; skip the wake-up.
        if scrolled {
            return;
        }
        self.wake_up();
    }
}
/// Builds a boxed `RenderNotifier` plus the receiving end of its channel;
/// the receiver gets one message per completed frame.
fn create_notifier() -> (Box<RenderNotifier>, Receiver<()>) {
    let (tx, rx) = channel();
    // Field-init shorthand replaces the redundant `tx: tx`.
    (Box::new(Notifier { tx }), rx)
}
// Entry point for wrench, WebRender's standalone test/debug harness.
// Parses CLI args, creates a (possibly headless) GL window, dispatches to
// the selected subcommand, then runs the interactive event/render loop.
fn main() {
    #[cfg(feature = "logging")]
    env_logger::init().unwrap();
    let args_yaml = load_yaml!("args.yaml");
    let args = clap::App::from_yaml(args_yaml)
        .setting(clap::AppSettings::ArgRequiredElseHelp)
        .get_matches();
    // handle some global arguments
    let res_path = args.value_of("shaders").map(|s| PathBuf::from(s));
    let dp_ratio = args.value_of("dp_ratio").map(|v| v.parse::<f32>().unwrap());
    let save_type = args.value_of("save").map(|s| match s {
        "yaml" => wrench::SaveType::Yaml,
        "json" => wrench::SaveType::Json,
        "ron" => wrench::SaveType::Ron,
        "binary" => wrench::SaveType::Binary,
        _ => panic!("Save type must be json, ron, yaml, or binary")
    });
    // Window size: named presets (720p/1080p/4k) or an explicit "WxH" pair;
    // defaults to 1080p.
    let size = args.value_of("size")
        .map(|s| if s == "720p" {
            DeviceUintSize::new(1280, 720)
        } else if s == "1080p" {
            DeviceUintSize::new(1920, 1080)
        } else if s == "4k" {
            DeviceUintSize::new(3840, 2160)
        } else {
            let x = s.find('x').expect(
                "Size must be specified exactly as 720p, 1080p, 4k, or width x height",
            );
            let w = s[0 .. x].parse::<u32>().expect("Invalid size width");
            let h = s[x + 1 ..].parse::<u32>().expect("Invalid size height");
            DeviceUintSize::new(w, h)
        })
        .unwrap_or(DeviceUintSize::new(1920, 1080));
    let is_headless = args.is_present("headless");
    let zoom_factor = args.value_of("zoom").map(|z| z.parse::<f32>().unwrap());
    let mut window = make_window(size, dp_ratio, args.is_present("vsync"), is_headless);
    let dp_ratio = dp_ratio.unwrap_or(window.hidpi_factor());
    let dim = window.get_inner_size();
    // Only subcommands that block on frame completion need the notifier.
    let needs_frame_notifier = ["perf", "reftest", "png", "rawtest"]
        .iter()
        .any(|s| args.subcommand_matches(s).is_some());
    let (notifier, rx) = if needs_frame_notifier {
        let (notifier, rx) = create_notifier();
        (Some(notifier), Some(rx))
    } else {
        (None, None)
    };
    let mut wrench = Wrench::new(
        &mut window,
        res_path,
        dp_ratio,
        save_type,
        dim,
        args.is_present("rebuild"),
        args.is_present("no_subpixel_aa"),
        args.is_present("debug"),
        args.is_present("verbose"),
        args.is_present("no_scissor"),
        args.is_present("no_batch"),
        args.is_present("precache"),
        args.is_present("slow_subpixel"),
        zoom_factor.unwrap_or(1.0),
        notifier,
    );
    // Subcommand dispatch. Test-style subcommands run to completion and
    // return early; the others produce a `WrenchThing` for the render loop.
    let mut thing = if let Some(subargs) = args.subcommand_matches("show") {
        Box::new(YamlFrameReader::new_from_args(subargs)) as Box<WrenchThing>
    } else if let Some(subargs) = args.subcommand_matches("replay") {
        Box::new(BinaryFrameReader::new_from_args(subargs)) as Box<WrenchThing>
    } else if let Some(subargs) = args.subcommand_matches("png") {
        let surface = match subargs.value_of("surface") {
            Some("screen") | None => png::ReadSurface::Screen,
            Some("gpu-cache") => png::ReadSurface::GpuCache,
            _ => panic!("Unknown surface argument value")
        };
        let reader = YamlFrameReader::new_from_args(subargs);
        png::png(&mut wrench, surface, &mut window, reader, rx.unwrap());
        wrench.renderer.deinit();
        return;
    } else if let Some(subargs) = args.subcommand_matches("reftest") {
        let dim = window.get_inner_size();
        let harness = ReftestHarness::new(&mut wrench, &mut window, rx.unwrap());
        let base_manifest = Path::new("reftests/reftest.list");
        let specific_reftest = subargs.value_of("REFTEST").map(|x| Path::new(x));
        let mut reftest_options = ReftestOptions::default();
        if let Some(allow_max_diff) = subargs.value_of("fuzz_tolerance") {
            reftest_options.allow_max_difference = allow_max_diff.parse().unwrap_or(1);
            // With an explicit fuzz tolerance, any number of pixels may
            // differ (up to the whole window area).
            reftest_options.allow_num_differences = dim.width as usize * dim.height as usize;
        }
        harness.run(base_manifest, specific_reftest, &reftest_options);
        return;
    } else if let Some(_) = args.subcommand_matches("rawtest") {
        {
            let harness = RawtestHarness::new(&mut wrench, &mut window, rx.unwrap());
            harness.run();
        }
        wrench.renderer.deinit();
        return;
    } else if let Some(subargs) = args.subcommand_matches("perf") {
        // Perf mode wants to benchmark the total cost of drawing
        // a new display list each frame.
        wrench.rebuild_display_lists = true;
        let harness = PerfHarness::new(&mut wrench, &mut window, rx.unwrap());
        let base_manifest = Path::new("benchmarks/benchmarks.list");
        let filename = subargs.value_of("filename").unwrap();
        harness.run(base_manifest, filename);
        return;
    } else if let Some(subargs) = args.subcommand_matches("compare_perf") {
        let first_filename = subargs.value_of("first_filename").unwrap();
        let second_filename = subargs.value_of("second_filename").unwrap();
        perf::compare(first_filename, second_filename);
        return;
    } else if let Some(subargs) = args.subcommand_matches("load") {
        let path = PathBuf::from(subargs.value_of("path").unwrap());
        let mut documents = wrench.api.load_capture(path);
        println!("loaded {:?}", documents.iter().map(|cd| cd.document_id).collect::<Vec<_>>());
        // Only the first captured document is replayed.
        let captured = documents.swap_remove(0);
        window.resize(captured.window_size);
        wrench.document_id = captured.document_id;
        Box::new(captured) as Box<WrenchThing>
    } else {
        panic!("Should never have gotten here! {:?}", args);
    };
    let mut show_help = false;
    let mut do_loop = false;
    let mut cpu_profile_index = 0;
    let dim = window.get_inner_size();
    wrench.update(dim);
    thing.do_frame(&mut wrench);
    // Interactive loop: wait for events (headless mode synthesizes one so the
    // loop keeps spinning), translate keyboard shortcuts into debug-flag
    // toggles / frame navigation, and re-render as needed.
    'outer: loop {
        if let Some(window_title) = wrench.take_title() {
            window.set_title(&window_title);
        }
        let mut events = Vec::new();
        match window {
            WindowWrapper::Headless(..) => {
                events.push(glutin::Event::Awakened);
            }
            WindowWrapper::Window(ref window, _) => {
                // Block for one event, then drain whatever else is queued.
                events.push(window.wait_events().next().unwrap());
                events.extend(window.poll_events());
            }
        }
        let mut do_frame = false;
        let mut do_render = false;
        for event in events {
            match event {
                glutin::Event::Closed => {
                    break 'outer;
                }
                glutin::Event::Refresh |
                glutin::Event::Awakened |
                glutin::Event::Focused(..) |
                glutin::Event::MouseMoved(..) => {
                    do_render = true;
                }
                glutin::Event::KeyboardInput(ElementState::Pressed, _scan_code, Some(vk)) => match vk {
                    VirtualKeyCode::Escape => {
                        break 'outer;
                    }
                    VirtualKeyCode::P => {
                        wrench.renderer.toggle_debug_flags(DebugFlags::PROFILER_DBG);
                        do_render = true;
                    }
                    VirtualKeyCode::O => {
                        wrench.renderer.toggle_debug_flags(DebugFlags::RENDER_TARGET_DBG);
                        do_render = true;
                    }
                    VirtualKeyCode::I => {
                        wrench.renderer.toggle_debug_flags(DebugFlags::TEXTURE_CACHE_DBG);
                        do_render = true;
                    }
                    VirtualKeyCode::B => {
                        wrench.renderer.toggle_debug_flags(DebugFlags::ALPHA_PRIM_DBG);
                        do_render = true;
                    }
                    VirtualKeyCode::S => {
                        wrench.renderer.toggle_debug_flags(DebugFlags::COMPACT_PROFILER);
                        do_render = true;
                    }
                    VirtualKeyCode::Q => {
                        wrench.renderer.toggle_debug_flags(
                            DebugFlags::GPU_TIME_QUERIES | DebugFlags::GPU_SAMPLE_QUERIES
                        );
                        do_render = true;
                    }
                    VirtualKeyCode::R => {
                        // Reset page zoom to 100%.
                        wrench.set_page_zoom(ZoomFactor::new(1.0));
                        do_frame = true;
                    }
                    VirtualKeyCode::M => {
                        wrench.api.notify_memory_pressure();
                        do_render = true;
                    }
                    VirtualKeyCode::L => {
                        // Toggle automatic frame advancing after each render.
                        do_loop = !do_loop;
                        do_render = true;
                    }
                    VirtualKeyCode::Left => {
                        thing.prev_frame();
                        do_frame = true;
                    }
                    VirtualKeyCode::Right => {
                        thing.next_frame();
                        do_frame = true;
                    }
                    VirtualKeyCode::H => {
                        show_help = !show_help;
                        do_render = true;
                    }
                    VirtualKeyCode::T => {
                        let file_name = format!("profile-{}.json", cpu_profile_index);
                        wrench.renderer.save_cpu_profile(&file_name);
                        cpu_profile_index += 1;
                    }
                    VirtualKeyCode::C => {
                        let path = PathBuf::from("../captures/wrench");
                        wrench.api.save_capture(path, CaptureBits::all());
                    }
                    VirtualKeyCode::Up => {
                        let current_zoom = wrench.get_page_zoom();
                        let new_zoom_factor = ZoomFactor::new(current_zoom.get() + 0.1);
                        wrench.set_page_zoom(new_zoom_factor);
                        do_frame = true;
                    }
                    VirtualKeyCode::Down => {
                        // Zoom out in 0.1 steps, clamped to a 0.1 minimum.
                        let current_zoom = wrench.get_page_zoom();
                        let new_zoom_factor = ZoomFactor::new((current_zoom.get() - 0.1).max(0.1));
                        wrench.set_page_zoom(new_zoom_factor);
                        do_frame = true;
                    }
                    _ => {}
                }
                _ => {}
            }
        }
        let dim = window.get_inner_size();
        wrench.update(dim);
        if do_frame {
            let frame_num = thing.do_frame(&mut wrench);
            unsafe {
                CURRENT_FRAME_NUMBER = frame_num;
            }
        }
        if do_render {
            if show_help {
                wrench.show_onscreen_help();
            }
            wrench.render();
            window.swap_buffers();
            if do_loop {
                thing.next_frame();
            }
        }
    }
    // In headless mode nothing is visible on screen, so dump the final
    // frame to a PNG for inspection.
    if is_headless {
        let rect = DeviceUintRect::new(DeviceUintPoint::zero(), size);
        let pixels = wrench.renderer.read_pixels_rgba8(rect);
        save_flipped("screenshot.png", pixels, size);
    }
    wrench.renderer.deinit();
}

View File

@ -0,0 +1,121 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use std::str::CharIndices;
// A function argument may contain alphanumerics plus '.' and '-',
// supporting tokens like '4', 'ab', '4.0', '-40'.
fn acceptable_arg_character(c: char) -> bool {
    match c {
        '.' | '-' => true,
        _ => c.is_alphanumeric(),
    }
}
// A simple hand-rolled parser for strings like "translate(1, 3)".
// Returns (function name, argument substrings, remaining unparsed input).
pub fn parse_function(s: &str) -> (&str, Vec<&str>, &str) {
    // XXX: This is not particularly easy to read. Sorry.
    // `start` is the byte offset of the current token's first character;
    // `o` is the lookahead (offset, char) pair, None at end of input.
    struct Parser<'a> {
        itr: CharIndices<'a>,
        start: usize,
        o: Option<(usize, char)>,
    }
    impl<'a> Parser<'a> {
        // Advance past whitespace, keeping `start` pointing just after it.
        fn skip_whitespace(&mut self) {
            while let Some(k) = self.o {
                if !k.1.is_whitespace() {
                    break;
                }
                self.start = k.0 + k.1.len_utf8();
                self.o = self.itr.next();
            }
        }
    }
    let mut c = s.char_indices();
    let o = c.next();
    let mut p = Parser {
        itr: c,
        start: 0,
        o: o,
    };
    p.skip_whitespace();
    // Scan the function name: alphabetic characters plus '_' and '-'.
    let mut end = p.start;
    while let Some(k) = p.o {
        if !k.1.is_alphabetic() && k.1 != '_' && k.1 != '-' {
            break;
        }
        end = k.0 + k.1.len_utf8();
        p.o = p.itr.next();
    }
    let name = &s[p.start .. end];
    let mut args = Vec::new();
    p.skip_whitespace();
    // If no '(' follows the name there is no argument list; return the
    // rest of the input untouched.
    if let Some(k) = p.o {
        if k.1 != '(' {
            return (name, args, &s[p.start ..]);
        }
        p.start = k.0 + k.1.len_utf8();
        p.o = p.itr.next();
    }
    // Scan comma-separated arguments until ')' or end of input.
    loop {
        p.skip_whitespace();
        let mut end = p.start;
        // Track '[' / ']' nesting so commas inside brackets stay within a
        // single argument (e.g. "drop-shadow(0, [1, 2, 3, 4], 5)").
        let mut bracket_count: i32 = 0;
        while let Some(k) = p.o {
            let prev_bracket_count = bracket_count;
            if k.1 == '[' {
                bracket_count = bracket_count + 1;
            } else if k.1 == ']' {
                bracket_count = bracket_count - 1;
            }
            if bracket_count < 0 {
                println!("Unexpected closing bracket");
                break;
            }
            let not_in_bracket = bracket_count == 0 && prev_bracket_count == 0;
            if !acceptable_arg_character(k.1) && not_in_bracket {
                break;
            }
            end = k.0 + k.1.len_utf8();
            p.o = p.itr.next();
        }
        args.push(&s[p.start .. end]);
        p.skip_whitespace();
        if let Some(k) = p.o {
            p.start = k.0 + k.1.len_utf8();
            p.o = p.itr.next();
            // unless we find a comma we're done
            if k.1 != ',' {
                if k.1 != ')' {
                    println!("Unexpected closing character: {}", k.1);
                }
                break;
            }
        } else {
            break;
        }
    }
    (name, args, &s[p.start ..])
}
// Covers name extraction with leading whitespace, negative/decimal args,
// and bracketed (nested) argument grouping.
#[test]
fn test() {
    assert_eq!(parse_function("rotate(40)").0, "rotate");
    assert_eq!(parse_function(" rotate(40)").0, "rotate");
    assert_eq!(parse_function(" rotate (40)").0, "rotate");
    assert_eq!(parse_function(" rotate ( 40 )").1[0], "40");
    assert_eq!(parse_function("rotate(-40.0)").1[0], "-40.0");
    assert_eq!(parse_function("drop-shadow(0, [1, 2, 3, 4], 5)").1[0], "0");
    assert_eq!(parse_function("drop-shadow(0, [1, 2, 3, 4], 5)").1[1], "[1, 2, 3, 4]");
    assert_eq!(parse_function("drop-shadow(0, [1, 2, 3, 4], 5)").1[2], "5");
    assert_eq!(parse_function("drop-shadow(0, [1, 2, [3, 4]], 5)").1[1], "[1, 2, [3, 4]]");
}

279
gfx/wrench/src/perf.rs Normal file
View File

@ -0,0 +1,279 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use WindowWrapper;
use serde_json;
use std::collections::{HashMap, HashSet};
use std::fs::File;
use std::io::{BufRead, BufReader};
use std::io::{Read, Write};
use std::path::{Path, PathBuf};
use std::sync::mpsc::Receiver;
use wrench::{Wrench, WrenchThing};
use yaml_frame_reader::YamlFrameReader;
// ANSI escape codes used to colorize the comparison table.
const COLOR_DEFAULT: &str = "\x1b[0m";
const COLOR_RED: &str = "\x1b[31m";
const COLOR_GREEN: &str = "\x1b[32m";
const COLOR_MAGENTA: &str = "\x1b[95m";
// Collect at least this many CPU and GPU frame profiles per benchmark...
const MIN_SAMPLE_COUNT: usize = 50;
// ...then discard this many outliers from each end before averaging.
const SAMPLE_EXCLUDE_COUNT: usize = 10;
/// A single benchmark: the path of the YAML frame file to render.
pub struct Benchmark {
    pub test: PathBuf,
}
/// The flattened list of benchmarks parsed from a manifest file
/// (includes resolved recursively).
pub struct BenchmarkManifest {
    pub benchmarks: Vec<Benchmark>,
}
impl BenchmarkManifest {
    /// Parses a benchmark manifest file.
    ///
    /// Each non-empty, non-comment line is either `include <path>` (another
    /// manifest, parsed recursively) or the path of a benchmark file; both
    /// are resolved relative to the manifest's directory. Everything after
    /// `#` on a line is a comment.
    ///
    /// Panics if the manifest (or an included one) cannot be opened or read.
    pub fn new(manifest: &Path) -> BenchmarkManifest {
        let dir = manifest.parent().unwrap();
        // Build the panic message lazily; `expect(&format!(..))` allocated
        // the message even on the success path.
        let f = File::open(manifest)
            .unwrap_or_else(|e| panic!("couldn't open manifest: {}: {}", manifest.display(), e));
        let file = BufReader::new(&f);
        let mut benchmarks = Vec::new();
        for line in file.lines() {
            let l = line.unwrap();
            // strip the comments
            let s = &l[0 .. l.find('#').unwrap_or(l.len())];
            let s = s.trim();
            if s.is_empty() {
                continue;
            }
            let mut items = s.split_whitespace();
            match items.next() {
                Some("include") => {
                    let include = dir.join(items.next().unwrap());
                    benchmarks.append(&mut BenchmarkManifest::new(include.as_path()).benchmarks);
                }
                Some(name) => {
                    let test = dir.join(name);
                    benchmarks.push(Benchmark { test });
                }
                // `s` is non-empty after trimming, so split_whitespace
                // always yields at least one item.
                None => unreachable!(),
            };
        }
        BenchmarkManifest { benchmarks }
    }
}
/// Averaged timing results for a single benchmark; all times are in
/// nanoseconds.
#[derive(Clone, Serialize, Deserialize)]
struct TestProfile {
    name: String,
    backend_time_ns: u64,
    composite_time_ns: u64,
    paint_time_ns: u64,
    draw_calls: usize,
}
/// A collection of benchmark results, serialized to/from a JSON file.
#[derive(Serialize, Deserialize)]
struct Profile {
    tests: Vec<TestProfile>,
}
impl Profile {
    /// Creates an empty profile.
    fn new() -> Profile {
        Profile { tests: Vec::new() }
    }
    /// Records the results of one benchmark run.
    fn add(&mut self, profile: TestProfile) {
        self.tests.push(profile);
    }
    /// Writes the profile as pretty-printed, newline-terminated JSON.
    /// Panics with context on I/O or serialization failure.
    fn save(&self, filename: &str) {
        let mut file = File::create(filename).expect("unable to create profile file");
        let s = serde_json::to_string_pretty(self).unwrap();
        // Borrow the bytes instead of moving the String through into_bytes.
        file.write_all(s.as_bytes()).unwrap();
        file.write_all(b"\n").unwrap();
    }
    /// Reads back a profile previously written by `save`.
    /// Panics with context if the file is missing or contains invalid JSON.
    fn load(filename: &str) -> Profile {
        let mut file = File::open(filename).expect("unable to open profile file");
        let mut string = String::new();
        file.read_to_string(&mut string)
            .expect("unable to read profile file");
        serde_json::from_str(&string).expect("Unable to load profile!")
    }
    /// Returns the set of test names plus a name -> result lookup map,
    /// used when diffing two profiles in `compare`.
    fn build_set_and_map_of_tests(&self) -> (HashSet<String>, HashMap<String, TestProfile>) {
        let mut hash_set = HashSet::new();
        let mut hash_map = HashMap::new();
        for test in &self.tests {
            hash_set.insert(test.name.clone());
            hash_map.insert(test.name.clone(), test.clone());
        }
        (hash_set, hash_map)
    }
}
/// Drives the `perf` subcommand: renders each benchmark repeatedly and
/// records averaged frame times into a `Profile`.
pub struct PerfHarness<'a> {
    wrench: &'a mut Wrench,
    window: &'a mut WindowWrapper,
    rx: Receiver<()>,
}
impl<'a> PerfHarness<'a> {
    /// `rx` receives one message per completed frame (see the notifier
    /// setup in main.rs).
    pub fn new(wrench: &'a mut Wrench, window: &'a mut WindowWrapper, rx: Receiver<()>) -> Self {
        PerfHarness { wrench, window, rx }
    }
    /// Runs every benchmark listed in `base_manifest` and writes the
    /// aggregated results to `filename` as JSON.
    pub fn run(mut self, base_manifest: &Path, filename: &str) {
        let manifest = BenchmarkManifest::new(base_manifest);
        let mut profile = Profile::new();
        for t in manifest.benchmarks {
            let stats = self.render_yaml(t.test.as_path());
            profile.add(stats);
        }
        profile.save(filename);
    }
    /// Renders one YAML frame repeatedly until enough CPU and GPU profile
    /// samples have accumulated, then averages them into a `TestProfile`.
    fn render_yaml(&mut self, filename: &Path) -> TestProfile {
        let mut reader = YamlFrameReader::new(filename);
        // Loop until we get a reasonable number of CPU and GPU
        // frame profiles. Then take the mean.
        let mut cpu_frame_profiles = Vec::new();
        let mut gpu_frame_profiles = Vec::new();
        while cpu_frame_profiles.len() < MIN_SAMPLE_COUNT ||
            gpu_frame_profiles.len() < MIN_SAMPLE_COUNT
        {
            reader.do_frame(self.wrench);
            // Block until the frame is done before collecting its profiles.
            self.rx.recv().unwrap();
            self.wrench.render();
            self.window.swap_buffers();
            let (cpu_profiles, gpu_profiles) = self.wrench.get_frame_profiles();
            cpu_frame_profiles.extend(cpu_profiles);
            gpu_frame_profiles.extend(gpu_profiles);
        }
        // Ensure the draw calls match in every sample.
        let draw_calls = cpu_frame_profiles[0].draw_calls;
        assert!(
            cpu_frame_profiles
                .iter()
                .all(|s| s.draw_calls == draw_calls)
        );
        let composite_time_ns = extract_sample(&mut cpu_frame_profiles, |a| a.composite_time_ns);
        let paint_time_ns = extract_sample(&mut gpu_frame_profiles, |a| a.paint_time_ns);
        let backend_time_ns = extract_sample(&mut cpu_frame_profiles, |a| a.backend_time_ns);
        TestProfile {
            name: filename.to_str().unwrap().to_string(),
            composite_time_ns,
            paint_time_ns,
            backend_time_ns,
            draw_calls,
        }
    }
}
/// Averages one metric across samples after sorting and discarding the
/// `SAMPLE_EXCLUDE_COUNT` lowest and highest values to reject outliers.
///
/// Callers must supply more than `2 * SAMPLE_EXCLUDE_COUNT` samples
/// (guaranteed by the `MIN_SAMPLE_COUNT` collection loop); otherwise the
/// slice below would panic.
fn extract_sample<F, T>(profiles: &mut [T], f: F) -> u64
where
    F: Fn(&T) -> u64,
{
    let mut samples: Vec<u64> = profiles.iter().map(f).collect();
    // sort_unstable avoids the stable merge sort's allocation; ordering of
    // equal u64 values is irrelevant here.
    samples.sort_unstable();
    let useful_samples = &samples[SAMPLE_EXCLUDE_COUNT .. samples.len() - SAMPLE_EXCLUDE_COUNT];
    let total_time: u64 = useful_samples.iter().sum();
    total_time / useful_samples.len() as u64
}
// Picks a table color for a metric: within 10% of the baseline renders in
// the default color, regressions in red, improvements in green.
fn select_color(base: f32, value: f32) -> &'static str {
    let tolerance = base * 0.1;
    if (value - base).abs() < tolerance {
        return COLOR_DEFAULT;
    }
    if value > base {
        COLOR_RED
    } else {
        COLOR_GREEN
    }
}
/// Prints a colorized table comparing two saved perf profiles: tests
/// present in only one profile are listed in magenta with empty columns;
/// tests in both show old -> new draw calls and frame times.
pub fn compare(first_filename: &str, second_filename: &str) {
    let profile0 = Profile::load(first_filename);
    let profile1 = Profile::load(second_filename);
    let (set0, map0) = profile0.build_set_and_map_of_tests();
    let (set1, map1) = profile1.build_set_and_map_of_tests();
    print!("+------------------------------------------------");
    println!("+--------------+------------------+------------------+");
    print!("| Test name ");
    println!("| Draw Calls | Composite (ms) | Paint (ms) |");
    print!("+------------------------------------------------");
    println!("+--------------+------------------+------------------+");
    // Tests that exist in exactly one of the two profiles.
    for test_name in set0.symmetric_difference(&set1) {
        println!(
            "| {}{:47}{}|{:14}|{:18}|{:18}|",
            COLOR_MAGENTA,
            test_name,
            COLOR_DEFAULT,
            " -",
            " -",
            " -"
        );
    }
    for test_name in set0.intersection(&set1) {
        let test0 = &map0[test_name];
        let test1 = &map1[test_name];
        // Convert nanoseconds to milliseconds for display.
        let composite_time0 = test0.composite_time_ns as f32 / 1000000.0;
        let composite_time1 = test1.composite_time_ns as f32 / 1000000.0;
        let paint_time0 = test0.paint_time_ns as f32 / 1000000.0;
        let paint_time1 = test1.paint_time_ns as f32 / 1000000.0;
        // Fewer draw calls in the second profile is an improvement (green).
        let draw_calls_color = if test0.draw_calls == test1.draw_calls {
            COLOR_DEFAULT
        } else if test0.draw_calls > test1.draw_calls {
            COLOR_GREEN
        } else {
            COLOR_RED
        };
        let composite_time_color = select_color(composite_time0, composite_time1);
        let paint_time_color = select_color(paint_time0, paint_time1);
        let draw_call_string = format!(" {} -> {}", test0.draw_calls, test1.draw_calls);
        let composite_time_string = format!(" {:.2} -> {:.2}", composite_time0, composite_time1);
        let paint_time_string = format!(" {:.2} -> {:.2}", paint_time0, paint_time1);
        println!(
            "| {:47}|{}{:14}{}|{}{:18}{}|{}{:18}{}|",
            test_name,
            draw_calls_color,
            draw_call_string,
            COLOR_DEFAULT,
            composite_time_color,
            composite_time_string,
            COLOR_DEFAULT,
            paint_time_color,
            paint_time_string,
            COLOR_DEFAULT
        );
    }
    print!("+------------------------------------------------");
    println!("+--------------+------------------+------------------+");
}

113
gfx/wrench/src/png.rs Normal file
View File

@ -0,0 +1,113 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use WindowWrapper;
use image::png::PNGEncoder;
use image::{self, ColorType, GenericImage};
use std::fs::File;
use std::path::Path;
use std::sync::mpsc::Receiver;
use webrender::api::*;
use wrench::{Wrench, WrenchThing};
use yaml_frame_reader::YamlFrameReader;
/// Which buffer to capture: the rendered screen or the GPU cache texture.
pub enum ReadSurface {
    Screen,
    GpuCache,
}
/// Options controlling how a captured buffer is written to a PNG.
pub struct SaveSettings {
    // GL read-back is bottom-up; set this to flip into top-down image order.
    pub flip_vertical: bool,
    // If a PNG already exists at the destination path, crop the new image
    // to the old one's dimensions before encoding.
    pub try_crop: bool,
}
/// Encodes raw RGBA8 pixels as a PNG at `path`, optionally flipping
/// vertically and optionally cropping to the dimensions of a PNG already
/// present at that path. Panics if the buffer doesn't match `size` or if
/// encoding fails.
pub fn save<P: Clone + AsRef<Path>>(
    path: P,
    orig_pixels: Vec<u8>,
    mut size: DeviceUintSize,
    settings: SaveSettings
) {
    let mut buffer = image::RgbaImage::from_raw(
        size.width,
        size.height,
        orig_pixels,
    ).expect("bug: unable to construct image buffer");
    if settings.flip_vertical {
        // flip image vertically (texture is upside down)
        buffer = image::imageops::flip_vertical(&buffer);
    }
    if settings.try_crop {
        // Crop to the pre-existing image's dimensions so the two files can
        // be compared pixel-for-pixel.
        if let Ok(existing_image) = image::open(path.clone()) {
            let old_dims = existing_image.dimensions();
            println!("Crop from {:?} to {:?}", size, old_dims);
            size.width = old_dims.0;
            size.height = old_dims.1;
            buffer = image::imageops::crop(
                &mut buffer,
                0,
                0,
                size.width,
                size.height
            ).to_image();
        }
    }
    let encoder = PNGEncoder::new(File::create(path).unwrap());
    encoder
        .encode(&buffer, size.width, size.height, ColorType::RGBA(8))
        .expect("Unable to encode PNG!");
}
/// Convenience wrapper around `save` for GL screen read-backs: always
/// flips vertically and crops against any pre-existing PNG at `path`.
pub fn save_flipped<P: Clone + AsRef<Path>>(
    path: P,
    orig_pixels: Vec<u8>,
    size: DeviceUintSize,
) {
    let settings = SaveSettings {
        flip_vertical: true,
        try_crop: true,
    };
    save(path, orig_pixels, size, settings)
}
/// Renders one YAML frame and writes the requested surface (screen or GPU
/// cache texture) as a PNG next to the YAML file (same name, `.png`
/// extension).
pub fn png(
    wrench: &mut Wrench,
    surface: ReadSurface,
    window: &mut WindowWrapper,
    mut reader: YamlFrameReader,
    rx: Receiver<()>,
) {
    reader.do_frame(wrench);
    // wait for the frame
    rx.recv().unwrap();
    wrench.render();
    let (device_size, data, settings) = match surface {
        ReadSurface::Screen => {
            let dim = window.get_inner_size();
            let rect = DeviceUintRect::new(DeviceUintPoint::zero(), dim);
            let data = wrench.renderer
                .read_pixels_rgba8(rect);
            // Screen read-back is bottom-up, so flip; crop to match any
            // pre-existing reference image.
            (rect.size, data, SaveSettings {
                flip_vertical: true,
                try_crop: true,
            })
        }
        ReadSurface::GpuCache => {
            let (size, data) = wrench.renderer
                .read_gpu_cache();
            (size, data, SaveSettings {
                flip_vertical: false,
                try_crop: false,
            })
        }
    };
    let mut out_path = reader.yaml_path().clone();
    out_path.set_extension("png");
    save(out_path, data, device_size, settings);
}

View File

@ -0,0 +1,55 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
// Slow reference implementations; Gecko's gfx/2d/Swizzle.cpp has faster
// versions.
//
// Converts straight-alpha pixels to premultiplied alpha. Note that the
// byte-0 and byte-2 channels are exchanged on write (an RGBA <-> BGRA
// swizzle) — preserved from the original implementation.
pub fn premultiply(data: &mut [u8]) {
    for pixel in data.chunks_mut(4) {
        let (r, g, b, a) = (
            pixel[0] as u32,
            pixel[1] as u32,
            pixel[2] as u32,
            pixel[3] as u32,
        );
        // The +128 bias makes the /255 round to nearest instead of truncate.
        pixel[0] = ((b * a + 128) / 255) as u8;
        pixel[1] = ((g * a + 128) / 255) as u8;
        pixel[2] = ((r * a + 128) / 255) as u8;
        pixel[3] = a as u8;
    }
}
// Inverse of `premultiply`: restores straight alpha and swaps the byte-0
// and byte-2 channels back. Fully transparent pixels (a == 0) skip the
// division; their color bytes pass through unscaled (still swizzled).
pub fn unpremultiply(data: &mut [u8]) {
    for pixel in data.chunks_mut(4) {
        let a = pixel[3] as u32;
        let scale = |v: u32| if a > 0 { v * 255 / a } else { v };
        let b = scale(pixel[2] as u32);
        let g = scale(pixel[1] as u32);
        let r = scale(pixel[0] as u32);
        pixel[0] = b as u8;
        pixel[1] = g as u8;
        pixel[2] = r as u8;
        pixel[3] = a as u8;
    }
}
// Round-trip check: premultiply then unpremultiply restores the input for
// these values. assert_eq! on whole arrays replaces the original chained
// assert!(a && b && ...) so failures report the actual vs expected bytes.
#[test]
fn it_works() {
    let mut f = [0xff, 0xff, 0xff, 0x80, 0x00, 0xff, 0x00, 0x80];
    premultiply(&mut f);
    println!("{:?}", f);
    assert_eq!(f, [0x80, 0x80, 0x80, 0x80, 0x00, 0x80, 0x00, 0x80]);
    unpremultiply(&mut f);
    println!("{:?}", f);
    assert_eq!(f, [0xff, 0xff, 0xff, 0x80, 0x00, 0xff, 0x00, 0x80]);
}

567
gfx/wrench/src/rawtest.rs Normal file
View File

@ -0,0 +1,567 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use WindowWrapper;
use blob;
use euclid::{TypedRect, TypedSize2D, TypedPoint2D};
use std::sync::Arc;
use std::sync::atomic::{AtomicIsize, Ordering};
use std::sync::mpsc::Receiver;
use webrender::api::*;
use wrench::Wrench;
/// Driver for the `rawtest` subcommand: hand-written tests that exercise
/// the WebRender API directly (blob images, capture, save/restore).
pub struct RawtestHarness<'a> {
    wrench: &'a mut Wrench,
    rx: Receiver<()>,
    window: &'a mut WindowWrapper,
}
// Shorthand for TypedPoint2D::new with inferred units.
fn point<T: Copy, U>(x: T, y: T) -> TypedPoint2D<T, U> {
    TypedPoint2D::new(x, y)
}
// Shorthand for TypedSize2D::new with inferred units.
fn size<T: Copy, U>(x: T, y: T) -> TypedSize2D<T, U> {
    TypedSize2D::new(x, y)
}
// Shorthand for building a TypedRect from origin and size components.
fn rect<T: Copy, U>(x: T, y: T, width: T, height: T) -> TypedRect<T, U> {
    TypedRect::new(point(x, y), size(width, height))
}
impl<'a> RawtestHarness<'a> {
    /// `rx` receives one message per completed frame (see the notifier
    /// setup in main.rs).
    pub fn new(wrench: &'a mut Wrench, window: &'a mut WindowWrapper, rx: Receiver<()>) -> Self {
        RawtestHarness {
            wrench,
            rx,
            window,
        }
    }
    /// Executes every raw test in sequence; each test panics on failure.
    pub fn run(mut self) {
        self.test_retained_blob_images_test();
        self.test_blob_update_test();
        self.test_blob_update_epoch_test();
        self.test_tile_decomposition();
        self.test_save_restore();
        self.test_capture();
    }
    /// Blocks until the pending frame is ready, renders it, and reads back
    /// the RGBA8 contents of `window_rect`.
    fn render_and_get_pixels(&mut self, window_rect: DeviceUintRect) -> Vec<u8> {
        self.rx.recv().unwrap();
        self.wrench.render();
        self.wrench.renderer.read_pixels_rgba8(window_rect)
    }
    /// Builds a transaction for `builder`'s display list (plus optional
    /// resource updates), sends it, and bumps `epoch` for the next
    /// submission.
    fn submit_dl(
        &mut self,
        epoch: &mut Epoch,
        layout_size: LayoutSize,
        builder: DisplayListBuilder,
        resources: Option<ResourceUpdates>
    ) {
        let mut txn = Transaction::new();
        let root_background_color = Some(ColorF::new(1.0, 1.0, 1.0, 1.0));
        if let Some(resources) = resources {
            txn.update_resources(resources);
        }
        txn.set_display_list(
            *epoch,
            root_background_color,
            layout_size,
            builder.finalize(),
            false,
        );
        // Advance the epoch so the next display list supersedes this one.
        epoch.0 += 1;
        txn.generate_frame();
        self.wrench.api.send_transaction(self.wrench.document_id, txn);
    }
    /// Regression test: a tiled blob image pushed with fractional,
    /// non-tile-aligned size parameters used to crash tile decomposition.
    fn test_tile_decomposition(&mut self) {
        println!("\ttile decomposition...");
        // This exposes a crash in tile decomposition
        let layout_size = LayoutSize::new(800., 800.);
        let mut resources = ResourceUpdates::new();
        let blob_img = self.wrench.api.generate_image_key();
        resources.add_image(
            blob_img,
            ImageDescriptor::new(151, 56, ImageFormat::BGRA8, true),
            ImageData::new_blob_image(blob::serialize_blob(ColorU::new(50, 50, 150, 255))),
            Some(128),
        );
        let mut builder = DisplayListBuilder::new(self.wrench.root_pipeline_id, layout_size);
        let info = LayoutPrimitiveInfo::new(rect(448.899994, 74.0, 151.000031, 56.));
        // setup some malicious image size parameters
        builder.push_image(
            &info,
            size(151., 56.0),
            size(151.0, 56.0),
            ImageRendering::Auto,
            AlphaType::PremultipliedAlpha,
            blob_img,
        );
        let mut epoch = Epoch(0);
        self.submit_dl(&mut epoch, layout_size, builder, Some(resources));
        self.rx.recv().unwrap();
        self.wrench.render();
        // Leaving a tiled blob image in the resource cache
        // confuses the `test_capture`. TODO: remove this
        resources = ResourceUpdates::new();
        resources.delete_image(blob_img);
        self.wrench.api.update_resources(resources);
    }
    /// Checks that an unchanged blob image is rasterized only once even
    /// when referenced by successive display lists: the blob request
    /// callback must fire exactly once across two rendered frames, while
    /// the two frames (blob at different offsets) must still differ.
    fn test_retained_blob_images_test(&mut self) {
        println!("\tretained blob images test...");
        let blob_img;
        let window_size = self.window.get_inner_size();
        let test_size = DeviceUintSize::new(400, 400);
        let window_rect = DeviceUintRect::new(
            DeviceUintPoint::new(0, window_size.height - test_size.height),
            test_size,
        );
        let layout_size = LayoutSize::new(400., 400.);
        let mut resources = ResourceUpdates::new();
        {
            let api = &self.wrench.api;
            blob_img = api.generate_image_key();
            resources.add_image(
                blob_img,
                ImageDescriptor::new(500, 500, ImageFormat::BGRA8, true),
                ImageData::new_blob_image(blob::serialize_blob(ColorU::new(50, 50, 150, 255))),
                None,
            );
        }
        // draw the blob the first time
        let mut builder = DisplayListBuilder::new(self.wrench.root_pipeline_id, layout_size);
        let info = LayoutPrimitiveInfo::new(rect(0.0, 60.0, 200.0, 200.0));
        builder.push_image(
            &info,
            size(200.0, 200.0),
            size(0.0, 0.0),
            ImageRendering::Auto,
            AlphaType::PremultipliedAlpha,
            blob_img,
        );
        let mut epoch = Epoch(0);
        self.submit_dl(&mut epoch, layout_size, builder, Some(resources));
        // draw the blob image a second time at a different location
        // make a new display list that refers to the first image
        let mut builder = DisplayListBuilder::new(self.wrench.root_pipeline_id, layout_size);
        let info = LayoutPrimitiveInfo::new(rect(1.0, 60.0, 200.0, 200.0));
        builder.push_image(
            &info,
            size(200.0, 200.0),
            size(0.0, 0.0),
            ImageRendering::Auto,
            AlphaType::PremultipliedAlpha,
            blob_img,
        );
        self.submit_dl(&mut epoch, layout_size, builder, None);
        // Count blob rasterization requests via the test callback.
        let called = Arc::new(AtomicIsize::new(0));
        let called_inner = Arc::clone(&called);
        self.wrench.callbacks.lock().unwrap().request = Box::new(move |_| {
            called_inner.fetch_add(1, Ordering::SeqCst);
        });
        let pixels_first = self.render_and_get_pixels(window_rect);
        assert!(called.load(Ordering::SeqCst) == 1);
        let pixels_second = self.render_and_get_pixels(window_rect);
        // make sure we only requested once
        assert!(called.load(Ordering::SeqCst) == 1);
        // use png;
        // png::save_flipped("out1.png", &pixels_first, window_rect.size);
        // png::save_flipped("out2.png", &pixels_second, window_rect.size);
        assert!(pixels_first != pixels_second);
    }
fn test_blob_update_epoch_test(&mut self) {
println!("\tblob update epoch test...");
let (blob_img, blob_img2);
let window_size = self.window.get_inner_size();
let test_size = DeviceUintSize::new(400, 400);
let window_rect = DeviceUintRect::new(
point(0, window_size.height - test_size.height),
test_size,
);
let layout_size = LayoutSize::new(400., 400.);
let mut resources = ResourceUpdates::new();
let (blob_img, blob_img2) = {
let api = &self.wrench.api;
blob_img = api.generate_image_key();
resources.add_image(
blob_img,
ImageDescriptor::new(500, 500, ImageFormat::BGRA8, true),
ImageData::new_blob_image(blob::serialize_blob(ColorU::new(50, 50, 150, 255))),
None,
);
blob_img2 = api.generate_image_key();
resources.add_image(
blob_img2,
ImageDescriptor::new(500, 500, ImageFormat::BGRA8, true),
ImageData::new_blob_image(blob::serialize_blob(ColorU::new(80, 50, 150, 255))),
None,
);
(blob_img, blob_img2)
};
// setup some counters to count how many times each image is requested
let img1_requested = Arc::new(AtomicIsize::new(0));
let img1_requested_inner = Arc::clone(&img1_requested);
let img2_requested = Arc::new(AtomicIsize::new(0));
let img2_requested_inner = Arc::clone(&img2_requested);
// track the number of times that the second image has been requested
self.wrench.callbacks.lock().unwrap().request = Box::new(move |&desc| {
if desc.key == blob_img {
img1_requested_inner.fetch_add(1, Ordering::SeqCst);
}
if desc.key == blob_img2 {
img2_requested_inner.fetch_add(1, Ordering::SeqCst);
}
});
// create two blob images and draw them
let mut builder = DisplayListBuilder::new(self.wrench.root_pipeline_id, layout_size);
let info = LayoutPrimitiveInfo::new(rect(0.0, 60.0, 200.0, 200.0));
let info2 = LayoutPrimitiveInfo::new(rect(200.0, 60.0, 200.0, 200.0));
let push_images = |builder: &mut DisplayListBuilder| {
builder.push_image(
&info,
size(200.0, 200.0),
size(0.0, 0.0),
ImageRendering::Auto,
AlphaType::PremultipliedAlpha,
blob_img,
);
builder.push_image(
&info2,
size(200.0, 200.0),
size(0.0, 0.0),
ImageRendering::Auto,
AlphaType::PremultipliedAlpha,
blob_img2,
);
};
push_images(&mut builder);
let mut epoch = Epoch(0);
self.submit_dl(&mut epoch, layout_size, builder, Some(resources));
let _pixels_first = self.render_and_get_pixels(window_rect);
// update and redraw both images
let mut resources = ResourceUpdates::new();
resources.update_image(
blob_img,
ImageDescriptor::new(500, 500, ImageFormat::BGRA8, true),
ImageData::new_blob_image(blob::serialize_blob(ColorU::new(50, 50, 150, 255))),
Some(rect(100, 100, 100, 100)),
);
resources.update_image(
blob_img2,
ImageDescriptor::new(500, 500, ImageFormat::BGRA8, true),
ImageData::new_blob_image(blob::serialize_blob(ColorU::new(59, 50, 150, 255))),
Some(rect(100, 100, 100, 100)),
);
let mut builder = DisplayListBuilder::new(self.wrench.root_pipeline_id, layout_size);
push_images(&mut builder);
self.submit_dl(&mut epoch, layout_size, builder, Some(resources));
let _pixels_second = self.render_and_get_pixels(window_rect);
// only update the first image
let mut resources = ResourceUpdates::new();
resources.update_image(
blob_img,
ImageDescriptor::new(500, 500, ImageFormat::BGRA8, true),
ImageData::new_blob_image(blob::serialize_blob(ColorU::new(50, 150, 150, 255))),
Some(rect(200, 200, 100, 100)),
);
let mut builder = DisplayListBuilder::new(self.wrench.root_pipeline_id, layout_size);
push_images(&mut builder);
self.submit_dl(&mut epoch, layout_size, builder, Some(resources));
let _pixels_third = self.render_and_get_pixels(window_rect);
// the first image should be requested 3 times
assert_eq!(img1_requested.load(Ordering::SeqCst), 3);
// the second image should've been requested twice
assert_eq!(img2_requested.load(Ordering::SeqCst), 2);
}
// Regression test for blob-image invalidation: an update that re-submits
// identical blob data must not change the rendered output, while an update
// with different data must change it.
fn test_blob_update_test(&mut self) {
    println!("\tblob update test...");
    let window_size = self.window.get_inner_size();
    let test_size = DeviceUintSize::new(400, 400);
    // Render into the bottom-left corner of the window.
    let window_rect = DeviceUintRect::new(
        point(0, window_size.height - test_size.height),
        test_size,
    );
    let layout_size = LayoutSize::new(400., 400.);
    let mut resources = ResourceUpdates::new();
    let blob_img = {
        let img = self.wrench.api.generate_image_key();
        resources.add_image(
            img,
            ImageDescriptor::new(500, 500, ImageFormat::BGRA8, true),
            ImageData::new_blob_image(blob::serialize_blob(ColorU::new(50, 50, 150, 255))),
            None,
        );
        img
    };
    // draw the blobs the first time
    let mut builder = DisplayListBuilder::new(self.wrench.root_pipeline_id, layout_size);
    let info = LayoutPrimitiveInfo::new(rect(0.0, 60.0, 200.0, 200.0));
    builder.push_image(
        &info,
        size(200.0, 200.0),
        size(0.0, 0.0),
        ImageRendering::Auto,
        AlphaType::PremultipliedAlpha,
        blob_img,
    );
    let mut epoch = Epoch(0);
    self.submit_dl(&mut epoch, layout_size, builder, Some(resources));
    let pixels_first = self.render_and_get_pixels(window_rect);
    // draw the blob image a second time after updating it with the same color
    let mut resources = ResourceUpdates::new();
    resources.update_image(
        blob_img,
        ImageDescriptor::new(500, 500, ImageFormat::BGRA8, true),
        ImageData::new_blob_image(blob::serialize_blob(ColorU::new(50, 50, 150, 255))),
        Some(rect(100, 100, 100, 100)),
    );
    // make a new display list that refers to the first image
    let mut builder = DisplayListBuilder::new(self.wrench.root_pipeline_id, layout_size);
    let info = LayoutPrimitiveInfo::new(rect(0.0, 60.0, 200.0, 200.0));
    builder.push_image(
        &info,
        size(200.0, 200.0),
        size(0.0, 0.0),
        ImageRendering::Auto,
        AlphaType::PremultipliedAlpha,
        blob_img,
    );
    self.submit_dl(&mut epoch, layout_size, builder, Some(resources));
    let pixels_second = self.render_and_get_pixels(window_rect);
    // draw the blob image a third time after updating it with a different color
    let mut resources = ResourceUpdates::new();
    resources.update_image(
        blob_img,
        ImageDescriptor::new(500, 500, ImageFormat::BGRA8, true),
        ImageData::new_blob_image(blob::serialize_blob(ColorU::new(50, 150, 150, 255))),
        Some(rect(200, 200, 100, 100)),
    );
    // make a new display list that refers to the first image
    let mut builder = DisplayListBuilder::new(self.wrench.root_pipeline_id, layout_size);
    let info = LayoutPrimitiveInfo::new(rect(0.0, 60.0, 200.0, 200.0));
    builder.push_image(
        &info,
        size(200.0, 200.0),
        size(0.0, 0.0),
        ImageRendering::Auto,
        AlphaType::PremultipliedAlpha,
        blob_img,
    );
    self.submit_dl(&mut epoch, layout_size, builder, Some(resources));
    let pixels_third = self.render_and_get_pixels(window_rect);
    // Identical update data must not change the output...
    assert!(pixels_first == pixels_second);
    // ...while a different color/dirty rect must.
    assert!(pixels_first != pixels_third);
}
// Ensures that content doing a save-restore produces the same results as not
fn test_save_restore(&mut self) {
    println!("\tsave/restore...");
    let window_size = self.window.get_inner_size();
    let test_size = DeviceUintSize::new(400, 400);
    let window_rect = DeviceUintRect::new(
        DeviceUintPoint::new(0, window_size.height - test_size.height),
        test_size,
    );
    let layout_size = LayoutSize::new(400., 400.);
    // Builds and renders one display list. With `should_try_and_fail` set,
    // extra primitives are pushed inside a save()/restore() pair and then
    // rolled back, so the output must equal the plain run.
    let mut do_test = |should_try_and_fail| {
        let mut builder = DisplayListBuilder::new(self.wrench.root_pipeline_id, layout_size);
        let clip = builder.define_clip(None, rect(110., 120., 200., 200.),
            None::<ComplexClipRegion>, None);
        builder.push_clip_id(clip);
        builder.push_rect(&PrimitiveInfo::new(rect(100., 100., 100., 100.)),
            ColorF::new(0.0, 0.0, 1.0, 1.0));
        if should_try_and_fail {
            // Everything in this branch is discarded by restore() below and
            // must leave no trace in the rendered frame.
            builder.save();
            let clip = builder.define_clip(None, rect(80., 80., 90., 90.),
                None::<ComplexClipRegion>, None);
            builder.push_clip_id(clip);
            builder.push_rect(&PrimitiveInfo::new(rect(110., 110., 50., 50.)),
                ColorF::new(0.0, 1.0, 0.0, 1.0));
            builder.push_shadow(&PrimitiveInfo::new(rect(100., 100., 100., 100.)),
                Shadow {
                    offset: LayoutVector2D::new(1.0, 1.0),
                    blur_radius: 1.0,
                    color: ColorF::new(0.0, 0.0, 0.0, 1.0),
                });
            builder.push_line(&PrimitiveInfo::new(rect(110., 110., 50., 2.)),
                0.0, LineOrientation::Horizontal,
                &ColorF::new(0.0, 0.0, 0.0, 1.0), LineStyle::Solid);
            builder.restore();
        }
        {
            // save() followed by clear_save() keeps the work done since the
            // save point instead of rolling it back.
            builder.save();
            let clip = builder.define_clip(None, rect(80., 80., 100., 100.),
                None::<ComplexClipRegion>, None);
            builder.push_clip_id(clip);
            builder.push_rect(&PrimitiveInfo::new(rect(150., 150., 100., 100.)),
                ColorF::new(0.0, 0.0, 1.0, 1.0));
            builder.pop_clip_id();
            builder.clear_save();
        }
        builder.pop_clip_id();
        self.submit_dl(&mut Epoch(0), layout_size, builder, None);
        self.render_and_get_pixels(window_rect)
    };
    let first = do_test(false);
    let second = do_test(true);
    assert_eq!(first, second);
}
// Exercises the capture/replay path: render a scene, save a capture,
// replace the scene, load the capture back, and verify both the loaded
// frame and a rebuilt frame reproduce the original pixels.
fn test_capture(&mut self) {
    println!("\tcapture...");
    let path = "../captures/test";
    let layout_size = LayoutSize::new(400., 400.);
    let dim = self.window.get_inner_size();
    let window_rect = DeviceUintRect::new(
        point(0, dim.height - layout_size.height as u32),
        size(layout_size.width as u32, layout_size.height as u32),
    );
    // 1. render some scene
    let mut resources = ResourceUpdates::new();
    let image = self.wrench.api.generate_image_key();
    // Single red BGRA pixel stretched over the image rect.
    resources.add_image(
        image,
        ImageDescriptor::new(1, 1, ImageFormat::BGRA8, true),
        ImageData::new(vec![0xFF, 0, 0, 0xFF]),
        None,
    );
    let mut builder = DisplayListBuilder::new(self.wrench.root_pipeline_id, layout_size);
    builder.push_image(
        &LayoutPrimitiveInfo::new(rect(300.0, 70.0, 150.0, 50.0)),
        size(150.0, 50.0),
        size(0.0, 0.0),
        ImageRendering::Auto,
        AlphaType::PremultipliedAlpha,
        image,
    );
    let mut txn = Transaction::new();
    txn.set_display_list(
        Epoch(0),
        Some(ColorF::new(1.0, 1.0, 1.0, 1.0)),
        layout_size,
        builder.finalize(),
        false,
    );
    txn.generate_frame();
    self.wrench.api.send_transaction(self.wrench.document_id, txn);
    let pixels0 = self.render_and_get_pixels(window_rect);
    // 2. capture it
    self.wrench.api.save_capture(path.into(), CaptureBits::all());
    // Wait until the capture is written before mutating the scene.
    self.rx.recv().unwrap();
    // 3. set a different scene
    builder = DisplayListBuilder::new(self.wrench.root_pipeline_id, layout_size);
    let mut txn = Transaction::new();
    txn.set_display_list(
        Epoch(1),
        Some(ColorF::new(1.0, 0.0, 0.0, 1.0)),
        layout_size,
        builder.finalize(),
        false,
    );
    self.wrench.api.send_transaction(self.wrench.document_id, txn);
    // 4. load the first one
    let mut documents = self.wrench.api.load_capture(path.into());
    let captured = documents.swap_remove(0);
    // 5. render the built frame and compare
    let pixels1 = self.render_and_get_pixels(window_rect);
    assert!(pixels0 == pixels1);
    // 6. rebuild the scene and compare again
    let mut txn = Transaction::new();
    txn.set_root_pipeline(captured.root_pipeline_id.unwrap());
    txn.generate_frame();
    self.wrench.api.send_transaction(captured.document_id, txn);
    let pixels2 = self.render_and_get_pixels(window_rect);
    assert!(pixels0 == pixels2);
}
}

495
gfx/wrench/src/reftest.rs Normal file
View File

@ -0,0 +1,495 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use WindowWrapper;
use base64;
use image::load as load_piston_image;
use image::png::PNGEncoder;
use image::{ColorType, ImageFormat};
use parse_function::parse_function;
use png::save_flipped;
use std::cmp;
use std::fmt::{Display, Error, Formatter};
use std::fs::File;
use std::io::{BufRead, BufReader};
use std::path::{Path, PathBuf};
use std::sync::mpsc::Receiver;
use webrender::RendererStats;
use webrender::api::*;
use wrench::{Wrench, WrenchThing};
use yaml_frame_reader::YamlFrameReader;
#[cfg(target_os = "windows")]
const PLATFORM: &str = "win";
#[cfg(target_os = "linux")]
const PLATFORM: &str = "linux";
#[cfg(target_os = "macos")]
const PLATFORM: &str = "mac";
#[cfg(not(any(target_os = "linux", target_os = "macos", target_os = "windows")))]
const PLATFORM: &str = "other";
const OPTION_DISABLE_SUBPX: &str = "disable-subpixel";
const OPTION_DISABLE_AA: &str = "disable-aa";
const OPTION_DISABLE_DUAL_SOURCE_BLENDING: &str = "disable-dual-source-blending";
/// Command-line overrides for comparison tolerances; applied to every test.
pub struct ReftestOptions {
    // These override values that are lower.
    pub allow_max_difference: usize,
    pub allow_num_differences: usize,
}
impl ReftestOptions {
pub fn default() -> Self {
ReftestOptions {
allow_max_difference: 0,
allow_num_differences: 0,
}
}
}
/// The comparison operator between a reftest frame and its reference.
pub enum ReftestOp {
    // `==` in the manifest: frames must match (within fuzz tolerance).
    Equal,
    // `!=` in the manifest: frames must differ.
    NotEqual,
}
impl Display for ReftestOp {
    /// Formats the operator exactly as it appears in manifests (`==` / `!=`).
    fn fmt(&self, f: &mut Formatter) -> Result<(), Error> {
        // Write the static token directly: the original built a fresh String
        // via `to_owned()` on every Display call for no benefit.
        f.write_str(match *self {
            ReftestOp::Equal => "==",
            ReftestOp::NotEqual => "!=",
        })
    }
}
/// One parsed manifest line: two frames to render/compare plus the modifiers
/// that applied to that line.
pub struct Reftest {
    op: ReftestOp,
    test: PathBuf,
    reference: PathBuf,
    // Forced font rendering mode, from the `options(...)` modifier.
    font_render_mode: Option<FontRenderMode>,
    // fuzzy(): largest allowed per-channel pixel difference...
    max_difference: usize,
    // ...and how many pixels may differ at all.
    num_differences: usize,
    // Optional renderer-statistics expectations; None means "don't check".
    expected_draw_calls: Option<usize>,
    expected_alpha_targets: Option<usize>,
    expected_color_targets: Option<usize>,
    disable_dual_source_blending: bool,
    zoom_factor: f32,
}
impl Display for Reftest {
    /// Renders the reftest as it appears in a manifest: `<test> <op> <reference>`.
    fn fmt(&self, f: &mut Formatter) -> Result<(), Error> {
        let lhs = self.test.display();
        let rhs = self.reference.display();
        write!(f, "{} {} {}", lhs, self.op, rhs)
    }
}
/// A rendered or loaded frame: RGBA8 pixels in GL read-back (bottom-up) order.
struct ReftestImage {
    // 4 bytes per pixel; length == width * height * 4.
    data: Vec<u8>,
    size: DeviceUintSize,
}
/// Result of comparing two equally-sized images pixel by pixel.
enum ReftestImageComparison {
    Equal,
    NotEqual {
        // Largest per-channel absolute difference seen in any pixel.
        max_difference: usize,
        // Number of pixels that differ in at least one channel.
        count_different: usize,
    },
}
impl ReftestImage {
    /// Compares two images of identical dimensions pixel by pixel, reporting
    /// the worst per-channel difference and the count of differing pixels.
    fn compare(&self, other: &ReftestImage) -> ReftestImageComparison {
        assert_eq!(self.size, other.size);
        assert_eq!(self.data.len(), other.data.len());
        assert_eq!(self.data.len() % 4, 0);

        let mut count = 0;
        let mut max = 0;

        // Walk both images one RGBA pixel (4 bytes) at a time.
        for (a, b) in self.data.chunks(4).zip(other.data.chunks(4)) {
            if a != b {
                // Largest per-channel absolute difference for this pixel.
                let pixel_max = a.iter()
                    .zip(b.iter())
                    .map(|(x, y)| (*x as isize - *y as isize).abs() as usize)
                    .max()
                    .unwrap();

                count += 1;
                max = cmp::max(max, pixel_max);
            }
        }

        if count != 0 {
            ReftestImageComparison::NotEqual {
                max_difference: max,
                count_different: count,
            }
        } else {
            ReftestImageComparison::Equal
        }
    }

    /// Consumes the image and returns it as a base64 `data:image/png` URI,
    /// flipped to top-down order so it displays correctly in a browser.
    fn create_data_uri(mut self) -> String {
        let width = self.size.width;
        let height = self.size.height;

        // flip image vertically (texture is upside down)
        let orig_pixels = self.data.clone();
        let stride = width as usize * 4;
        for y in 0 .. height as usize {
            let dst_start = y * stride;
            let src_start = (height as usize - y - 1) * stride;
            // `copy_from_slice` instead of `clone_from_slice`: u8 is Copy, so
            // this lowers to a plain memcpy; the redundant `[.. stride]`
            // re-slice of an already stride-long slice is dropped too.
            self.data[dst_start .. dst_start + stride]
                .copy_from_slice(&orig_pixels[src_start .. src_start + stride]);
        }

        let mut png: Vec<u8> = vec![];
        {
            let encoder = PNGEncoder::new(&mut png);
            encoder
                .encode(&self.data[..], width, height, ColorType::RGBA(8))
                .expect("Unable to encode PNG!");
        }
        let png_base64 = base64::encode(&png);
        format!("data:image/png;base64,{}", png_base64)
    }
}
/// The flattened contents of a manifest file (includes already expanded).
struct ReftestManifest {
    reftests: Vec<Reftest>,
}
impl ReftestManifest {
    /// Parses a reftest manifest, recursively inlining `include`d manifests.
    ///
    /// Each non-empty, non-comment line is a whitespace-separated sequence of
    /// modifiers (platform filter, zoom/fuzzy/stat expectations, options, the
    /// comparison operator) terminated by the test and reference paths, which
    /// are resolved relative to the manifest's directory.
    fn new(manifest: &Path, options: &ReftestOptions) -> ReftestManifest {
        let dir = manifest.parent().unwrap();
        // `unwrap_or_else` only builds the panic message on the failure path;
        // the original `expect(&format!(...))` formatted it on every call.
        let f = File::open(manifest)
            .unwrap_or_else(|_| panic!("couldn't open manifest: {}", manifest.display()));
        let file = BufReader::new(&f);

        let mut reftests = Vec::new();

        for line in file.lines() {
            let l = line.unwrap();

            // strip the comments
            let s = &l[0 .. l.find('#').unwrap_or(l.len())];
            let s = s.trim();
            if s.is_empty() {
                continue;
            }

            let tokens: Vec<&str> = s.split_whitespace().collect();

            // Per-line modifier state, reset for every manifest line.
            let mut max_difference = 0;
            let mut max_count = 0;
            let mut op = ReftestOp::Equal;
            let mut font_render_mode = None;
            let mut expected_color_targets = None;
            let mut expected_alpha_targets = None;
            let mut expected_draw_calls = None;
            let mut disable_dual_source_blending = false;
            let mut zoom_factor = 1.0;

            for (i, token) in tokens.iter().enumerate() {
                match *token {
                    "include" => {
                        assert!(i == 0, "include must be by itself");
                        let include = dir.join(tokens[1]);
                        reftests.append(
                            &mut ReftestManifest::new(include.as_path(), options).reftests,
                        );
                        break;
                    }
                    platform if platform.starts_with("platform") => {
                        let (_, args, _) = parse_function(platform);
                        if !args.iter().any(|arg| arg == &PLATFORM) {
                            // Skip due to platform not matching
                            break;
                        }
                    }
                    function if function.starts_with("zoom") => {
                        let (_, args, _) = parse_function(function);
                        zoom_factor = args[0].parse().unwrap();
                    }
                    function if function.starts_with("fuzzy") => {
                        // fuzzy(max_difference, count) widens the comparison tolerance.
                        let (_, args, _) = parse_function(function);
                        max_difference = args[0].parse().unwrap();
                        max_count = args[1].parse().unwrap();
                    }
                    function if function.starts_with("draw_calls") => {
                        let (_, args, _) = parse_function(function);
                        expected_draw_calls = Some(args[0].parse().unwrap());
                    }
                    function if function.starts_with("alpha_targets") => {
                        let (_, args, _) = parse_function(function);
                        expected_alpha_targets = Some(args[0].parse().unwrap());
                    }
                    function if function.starts_with("color_targets") => {
                        let (_, args, _) = parse_function(function);
                        expected_color_targets = Some(args[0].parse().unwrap());
                    }
                    options if options.starts_with("options") => {
                        let (_, args, _) = parse_function(options);
                        if args.iter().any(|arg| arg == &OPTION_DISABLE_SUBPX) {
                            font_render_mode = Some(FontRenderMode::Alpha);
                        }
                        if args.iter().any(|arg| arg == &OPTION_DISABLE_AA) {
                            font_render_mode = Some(FontRenderMode::Mono);
                        }
                        if args.iter().any(|arg| arg == &OPTION_DISABLE_DUAL_SOURCE_BLENDING) {
                            disable_dual_source_blending = true;
                        }
                    }
                    "==" => {
                        op = ReftestOp::Equal;
                    }
                    "!=" => {
                        op = ReftestOp::NotEqual;
                    }
                    _ => {
                        // First unrecognized token: it and its successor are the
                        // test/reference paths; the line is complete.
                        reftests.push(Reftest {
                            op,
                            // `tokens[i]` — the original's `tokens[i + 0]` was noise.
                            test: dir.join(tokens[i]),
                            reference: dir.join(tokens[i + 1]),
                            font_render_mode,
                            // Command-line overrides only ever widen the tolerance.
                            max_difference: cmp::max(max_difference, options.allow_max_difference),
                            num_differences: cmp::max(max_count, options.allow_num_differences),
                            expected_draw_calls,
                            expected_alpha_targets,
                            expected_color_targets,
                            disable_dual_source_blending,
                            zoom_factor,
                        });
                        break;
                    }
                }
            }
        }

        // Field-init shorthand instead of the redundant `reftests: reftests`.
        ReftestManifest { reftests }
    }

    /// Returns every reftest whose test or reference path starts with `prefix`.
    fn find(&self, prefix: &Path) -> Vec<&Reftest> {
        self.reftests
            .iter()
            .filter(|x| {
                x.test.starts_with(prefix) || x.reference.starts_with(prefix)
            })
            .collect()
    }
}
/// Drives reftest execution against a live wrench renderer and window.
pub struct ReftestHarness<'a> {
    wrench: &'a mut Wrench,
    window: &'a mut WindowWrapper,
    // Signaled when a frame is ready to be rendered/read back.
    rx: Receiver<()>,
}
impl<'a> ReftestHarness<'a> {
    pub fn new(wrench: &'a mut Wrench, window: &'a mut WindowWrapper, rx: Receiver<()>) -> Self {
        ReftestHarness { wrench, window, rx }
    }

    /// Runs every reftest from `base_manifest` matching the optional path
    /// filter, prints a summary, and asserts (panics) if any test failed.
    pub fn run(mut self, base_manifest: &Path, reftests: Option<&Path>, options: &ReftestOptions) {
        let manifest = ReftestManifest::new(base_manifest, options);
        // An empty prefix matches everything: run the whole manifest.
        let reftests = manifest.find(reftests.unwrap_or(&PathBuf::new()));

        let mut total_passing = 0;
        let mut failing = Vec::new();

        for t in reftests {
            if self.run_reftest(t) {
                total_passing += 1;
            } else {
                failing.push(t);
            }
        }

        println!(
            "REFTEST INFO | {} passing, {} failing",
            total_passing,
            failing.len()
        );

        if !failing.is_empty() {
            println!("\nReftests with unexpected results:");

            for reftest in &failing {
                println!("\t{}", reftest);
            }
        }

        // panic here so that we fail CI
        assert!(failing.is_empty());
    }

    /// Runs a single reftest; returns true on pass. Failures are logged in
    /// the TEST-UNEXPECTED-FAIL format the log parser understands.
    fn run_reftest(&mut self, t: &Reftest) -> bool {
        println!("REFTEST {}", t);

        self.wrench.set_page_zoom(ZoomFactor::new(t.zoom_factor));

        if t.disable_dual_source_blending {
            self.wrench
                .api
                .send_debug_cmd(
                    DebugCommand::EnableDualSourceBlending(false)
                );
        }

        let window_size = self.window.get_inner_size();
        // The reference can be another YAML scene or a pre-rendered PNG.
        let reference = match t.reference.extension().unwrap().to_str().unwrap() {
            "yaml" => {
                let (reference, _) = self.render_yaml(
                    t.reference.as_path(),
                    window_size,
                    t.font_render_mode,
                );
                reference
            }
            "png" => {
                self.load_image(t.reference.as_path(), ImageFormat::PNG)
            }
            other => panic!("Unknown reftest extension: {}", other),
        };

        // the reference can be smaller than the window size,
        // in which case we only compare the intersection
        let (test, stats) = self.render_yaml(
            t.test.as_path(),
            reference.size,
            t.font_render_mode,
        );

        // Re-enable dual source blending so the override doesn't leak into
        // subsequent reftests.
        if t.disable_dual_source_blending {
            self.wrench
                .api
                .send_debug_cmd(
                    DebugCommand::EnableDualSourceBlending(true)
                );
        }

        let comparison = test.compare(&reference);

        // Renderer-statistics expectations fail the test regardless of how
        // the pixels compare.
        if let Some(expected_draw_calls) = t.expected_draw_calls {
            if expected_draw_calls != stats.total_draw_calls {
                println!("REFTEST TEST-UNEXPECTED-FAIL | {}/{} | expected_draw_calls",
                    stats.total_draw_calls,
                    expected_draw_calls
                );
                println!("REFTEST TEST-END | {}", t);
                return false;
            }
        }
        if let Some(expected_alpha_targets) = t.expected_alpha_targets {
            if expected_alpha_targets != stats.alpha_target_count {
                println!("REFTEST TEST-UNEXPECTED-FAIL | {}/{} | alpha_target_count",
                    stats.alpha_target_count,
                    expected_alpha_targets
                );
                println!("REFTEST TEST-END | {}", t);
                return false;
            }
        }
        if let Some(expected_color_targets) = t.expected_color_targets {
            if expected_color_targets != stats.color_target_count {
                println!("REFTEST TEST-UNEXPECTED-FAIL | {}/{} | color_target_count",
                    stats.color_target_count,
                    expected_color_targets
                );
                println!("REFTEST TEST-END | {}", t);
                return false;
            }
        }

        match (&t.op, comparison) {
            (&ReftestOp::Equal, ReftestImageComparison::Equal) => true,
            (
                &ReftestOp::Equal,
                ReftestImageComparison::NotEqual {
                    max_difference,
                    count_different,
                },
            ) => if max_difference > t.max_difference || count_different > t.num_differences {
                println!(
                    "{} | {} | {}: {}, {}: {}",
                    "REFTEST TEST-UNEXPECTED-FAIL",
                    t,
                    "image comparison, max difference",
                    max_difference,
                    "number of differing pixels",
                    count_different
                );
                // Dump both images as data URIs so a failure can be inspected
                // straight from the CI log.
                println!("REFTEST IMAGE 1 (TEST): {}", test.create_data_uri());
                println!(
                    "REFTEST IMAGE 2 (REFERENCE): {}",
                    reference.create_data_uri()
                );
                println!("REFTEST TEST-END | {}", t);

                false
            } else {
                // Differences within the fuzz tolerance still pass.
                true
            },
            (&ReftestOp::NotEqual, ReftestImageComparison::Equal) => {
                println!("REFTEST TEST-UNEXPECTED-FAIL | {} | image comparison", t);
                println!("REFTEST TEST-END | {}", t);

                false
            }
            (&ReftestOp::NotEqual, ReftestImageComparison::NotEqual { .. }) => true,
        }
    }

    /// Loads a reference image from disk, flipped vertically to match the
    /// bottom-up order of GL read-back.
    fn load_image(&mut self, filename: &Path, format: ImageFormat) -> ReftestImage {
        let file = BufReader::new(File::open(filename).unwrap());
        let img_raw = load_piston_image(file, format).unwrap();
        let img = img_raw.flipv().to_rgba();
        let size = img.dimensions();
        ReftestImage {
            data: img.into_raw(),
            size: DeviceUintSize::new(size.0, size.1),
        }
    }

    /// Renders a YAML scene and reads back a `size`-sized region from the
    /// bottom-left corner of the window.
    fn render_yaml(
        &mut self,
        filename: &Path,
        size: DeviceUintSize,
        font_render_mode: Option<FontRenderMode>,
    ) -> (ReftestImage, RendererStats) {
        let mut reader = YamlFrameReader::new(filename);
        reader.set_font_render_mode(font_render_mode);
        reader.do_frame(self.wrench);

        // wait for the frame
        self.rx.recv().unwrap();
        let stats = self.wrench.render();

        let window_size = self.window.get_inner_size();
        assert!(size.width <= window_size.width && size.height <= window_size.height);

        // taking the bottom left sub-rectangle
        let rect = DeviceUintRect::new(DeviceUintPoint::new(0, window_size.height - size.height), size);
        let pixels = self.wrench.renderer.read_pixels_rgba8(rect);
        self.window.swap_buffers();

        // Flip to `true` locally to dump every rendered frame next to its YAML.
        let write_debug_images = false;
        if write_debug_images {
            let debug_path = filename.with_extension("yaml.png");
            save_flipped(debug_path, pixels.clone(), size);
        }

        reader.deinit(self.wrench);

        (ReftestImage { data: pixels, size }, stats)
    }
}

View File

@ -0,0 +1,195 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use ron;
use std::collections::HashMap;
use std::io::Write;
use std::path::{Path, PathBuf};
use std::{fmt, fs};
use super::CURRENT_FRAME_NUMBER;
use webrender;
use webrender::api::*;
use webrender::api::channel::Payload;
/// A font resource mirrored from the API stream.
enum CachedFont {
    Native(NativeFontHandle),
    // Raw bytes (or a path once flushed to disk) plus the face index.
    Raw(Option<Vec<u8>>, u32, Option<PathBuf>),
}
/// An image resource mirrored from the API stream.
struct CachedImage {
    width: u32,
    height: u32,
    format: ImageFormat,
    // In-memory pixel data; None once/if only a `path` is kept.
    bytes: Option<Vec<u8>>,
    path: Option<PathBuf>,
}
/// Records API traffic and serializes each frame's display list as a
/// `frame-N.ron` file under `frame_base`.
pub struct RonFrameWriter {
    frame_base: PathBuf,
    images: HashMap<ImageKey, CachedImage>,
    fonts: HashMap<FontKey, CachedFont>,
    // Frame counter of the last frame written, to avoid duplicates.
    last_frame_written: u32,
    // Set by begin_write_display_list; consumed when the payload arrives.
    dl_descriptor: Option<BuiltDisplayListDescriptor>,
}
impl RonFrameWriter {
    /// Creates a writer rooted at `path`; a `res/` subdirectory is created
    /// eagerly (errors ignored if it already exists).
    pub fn new(path: &Path) -> Self {
        let mut rsrc_base = path.to_owned();
        rsrc_base.push("res");
        fs::create_dir_all(&rsrc_base).ok();

        RonFrameWriter {
            frame_base: path.to_owned(),
            images: HashMap::new(),
            fonts: HashMap::new(),
            dl_descriptor: None,
            // Sentinel so the very first frame is always written.
            last_frame_written: u32::max_value(),
        }
    }

    /// Stashes the display-list descriptor; the byte payload follows via
    /// `finish_write_display_list`. Deduplicated on the global frame counter.
    pub fn begin_write_display_list(
        &mut self,
        _: &Epoch,
        _: &PipelineId,
        _: &Option<ColorF>,
        _: &LayoutSize,
        display_list: &BuiltDisplayListDescriptor,
    ) {
        // CURRENT_FRAME_NUMBER is a mutable static shared with the main loop;
        // presumably only touched from one thread in wrench — TODO confirm.
        unsafe {
            if CURRENT_FRAME_NUMBER == self.last_frame_written {
                return;
            }
            self.last_frame_written = CURRENT_FRAME_NUMBER;
        }

        self.dl_descriptor = Some(display_list.clone());
    }

    /// Rebuilds the display list from descriptor + payload and writes it as
    /// pretty-printed RON to `frame-N.ron`.
    pub fn finish_write_display_list(&mut self, _frame: u32, data: &[u8]) {
        let payload = Payload::from_data(data);
        let dl_desc = self.dl_descriptor.take().unwrap();

        let dl = BuiltDisplayList::from_data(payload.display_list_data, dl_desc);

        let mut frame_file_name = self.frame_base.clone();
        let current_shown_frame = unsafe { CURRENT_FRAME_NUMBER };
        frame_file_name.push(format!("frame-{}.ron", current_shown_frame));

        let mut file = fs::File::create(&frame_file_name).unwrap();

        let s = ron::ser::to_string_pretty(&dl, Default::default()).unwrap();
        file.write_all(&s.into_bytes()).unwrap();
        file.write_all(b"\n").unwrap();
    }

    /// Mirrors image/font resource updates into the local caches.
    fn update_resources(&mut self, updates: &ResourceUpdates) {
        for update in &updates.updates {
            match *update {
                ResourceUpdate::AddImage(ref img) => {
                    let bytes = match img.data {
                        ImageData::Raw(ref v) => (**v).clone(),
                        ImageData::External(_) | ImageData::Blob(_) => {
                            // NOTE(review): this returns from update_resources
                            // entirely, dropping any later updates in the same
                            // batch — looks intentional for unsupported image
                            // kinds, but worth confirming.
                            return;
                        }
                    };
                    self.images.insert(
                        img.key,
                        CachedImage {
                            width: img.descriptor.width,
                            height: img.descriptor.height,
                            format: img.descriptor.format,
                            bytes: Some(bytes),
                            path: None,
                        },
                    );
                }
                ResourceUpdate::UpdateImage(ref img) => {
                    if let Some(ref mut data) = self.images.get_mut(&img.key) {
                        // Updates may not change dimensions or format.
                        assert_eq!(data.width, img.descriptor.width);
                        assert_eq!(data.height, img.descriptor.height);
                        assert_eq!(data.format, img.descriptor.format);

                        if let ImageData::Raw(ref bytes) = img.data {
                            data.path = None;
                            data.bytes = Some((**bytes).clone());
                        } else {
                            // Other existing image types only make sense within the gecko integration.
                            println!(
                                "Wrench only supports updating buffer images ({}).",
                                "ignoring update commands"
                            );
                        }
                    }
                }
                ResourceUpdate::DeleteImage(img) => {
                    self.images.remove(&img);
                }
                ResourceUpdate::AddFont(ref font) => match font {
                    &AddFont::Raw(key, ref bytes, index) => {
                        self.fonts
                            .insert(key, CachedFont::Raw(Some(bytes.clone()), index, None));
                    }
                    &AddFont::Native(key, ref handle) => {
                        self.fonts.insert(key, CachedFont::Native(handle.clone()));
                    }
                },
                // Font-instance bookkeeping is not needed for RON dumps.
                ResourceUpdate::DeleteFont(_) => {}
                ResourceUpdate::AddFontInstance(_) => {}
                ResourceUpdate::DeleteFontInstance(_) => {}
            }
        }
    }
}
impl fmt::Debug for RonFrameWriter {
    /// Debug output is just the type name; the writer holds no state worth dumping.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        f.write_str("RonFrameWriter")
    }
}
impl webrender::ApiRecordingReceiver for RonFrameWriter {
    /// Intercepts recorded API messages, mirroring resource updates and
    /// display-list submissions into this writer.
    fn write_msg(&mut self, _: u32, msg: &ApiMsg) {
        match *msg {
            ApiMsg::UpdateResources(ref updates) => self.update_resources(updates),
            ApiMsg::UpdateDocument(_, ref doc_msgs) => {
                for doc_msg in doc_msgs {
                    match *doc_msg {
                        DocumentMsg::UpdateResources(ref resources) => {
                            self.update_resources(resources);
                        }
                        DocumentMsg::SetDisplayList {
                            ref epoch,
                            ref pipeline_id,
                            ref background,
                            ref viewport_size,
                            ref list_descriptor,
                            ..
                        } => {
                            self.begin_write_display_list(
                                epoch,
                                pipeline_id,
                                background,
                                viewport_size,
                                list_descriptor,
                            );
                        }
                        // Other document messages carry nothing we record.
                        _ => {}
                    }
                }
            }
            ApiMsg::CloneApi(..) => {}
            _ => {}
        }
    }

    /// Payloads follow their SetDisplayList message; only write one when a
    /// descriptor is pending from `begin_write_display_list`.
    fn write_payload(&mut self, frame: u32, data: &[u8]) {
        if self.dl_descriptor.is_some() {
            self.finish_write_display_list(frame, data);
        }
    }
}

134
gfx/wrench/src/scene.rs Normal file
View File

@ -0,0 +1,134 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use std::collections::HashMap;
use webrender::api::{BuiltDisplayList, ColorF, Epoch};
use webrender::api::{LayerSize, PipelineId};
use webrender::api::{PropertyBinding, PropertyBindingId, LayoutTransform, DynamicProperties};
/// Stores a map of the animated property bindings for the current display list. These
/// can be used to animate the transform and/or opacity of a display list without
/// re-submitting the display list itself.
#[derive(Default)]
pub struct SceneProperties {
    // Transform bindings, keyed by the binding id baked into the display list.
    transform_properties: HashMap<PropertyBindingId, LayoutTransform>,
    // Float bindings (e.g. opacity), same keying.
    float_properties: HashMap<PropertyBindingId, f32>,
}
impl SceneProperties {
    /// Replace the stored property values with the ones from `properties`.
    pub fn set_properties(&mut self, properties: &DynamicProperties) {
        self.transform_properties.clear();
        self.float_properties.clear();

        self.transform_properties
            .extend(properties.transforms.iter().map(|p| (p.key.id, p.value)));
        self.float_properties
            .extend(properties.floats.iter().map(|p| (p.key.id, p.value)));
    }

    /// Get the current value for a transform property. A missing binding or
    /// an unset key resolves to the identity transform (with a warning for
    /// the latter).
    pub fn resolve_layout_transform(
        &self,
        property: &Option<PropertyBinding<LayoutTransform>>,
    ) -> LayoutTransform {
        match *property {
            None => LayoutTransform::identity(),
            Some(PropertyBinding::Value(matrix)) => matrix,
            Some(PropertyBinding::Binding(ref key)) => self.transform_properties
                .get(&key.id)
                .cloned()
                .unwrap_or_else(|| {
                    println!("Property binding {:?} has an invalid value.", key);
                    LayoutTransform::identity()
                }),
        }
    }

    /// Get the current value for a float property, falling back to
    /// `default_value` (with a warning) when the key has no stored value.
    pub fn resolve_float(&self, property: &PropertyBinding<f32>, default_value: f32) -> f32 {
        match *property {
            PropertyBinding::Value(value) => value,
            PropertyBinding::Binding(ref key) => match self.float_properties.get(&key.id) {
                Some(&value) => value,
                None => {
                    println!("Property binding {:?} has an invalid value.", key);
                    default_value
                }
            },
        }
    }
}
/// A representation of the layout within the display port for a given document or iframe.
#[derive(Debug)]
pub struct ScenePipeline {
    pub epoch: Epoch,
    pub viewport_size: LayerSize,
    // None means no explicit background color was supplied.
    pub background_color: Option<ColorF>,
}
/// A complete representation of the layout bundling visible pipelines together.
pub struct Scene {
    // Animated property values for the current display lists.
    pub properties: SceneProperties,
    // The pipeline displayed at the root, if one has been set.
    pub root_pipeline_id: Option<PipelineId>,
    pub pipeline_map: HashMap<PipelineId, ScenePipeline>,
    pub display_lists: HashMap<PipelineId, BuiltDisplayList>,
}
impl Scene {
    /// Creates an empty scene with no root pipeline.
    pub fn new() -> Scene {
        Scene {
            properties: SceneProperties::default(),
            root_pipeline_id: None,
            pipeline_map: HashMap::default(),
            display_lists: HashMap::default(),
        }
    }

    pub fn set_root_pipeline_id(&mut self, pipeline_id: PipelineId) {
        self.root_pipeline_id = Some(pipeline_id);
    }

    /// Removes a pipeline's metadata and display list; clears the root if it
    /// was the root pipeline.
    pub fn remove_pipeline(&mut self, pipeline_id: &PipelineId) {
        if self.root_pipeline_id == Some(*pipeline_id) {
            self.root_pipeline_id = None;
        }
        self.pipeline_map.remove(pipeline_id);
        self.display_lists.remove(pipeline_id);
    }

    /// Registers (or replaces) the pipeline's metadata ahead of its display
    /// list being finished.
    pub fn begin_display_list(
        &mut self,
        pipeline_id: &PipelineId,
        epoch: &Epoch,
        background_color: &Option<ColorF>,
        viewport_size: &LayerSize,
    ) {
        // These types are all Copy, so dereference instead of the original's
        // four `.clone()` calls.
        let new_pipeline = ScenePipeline {
            epoch: *epoch,
            viewport_size: *viewport_size,
            background_color: *background_color,
        };

        self.pipeline_map.insert(*pipeline_id, new_pipeline);
    }

    /// Stores the built display list for a pipeline.
    pub fn finish_display_list(
        &mut self,
        pipeline_id: PipelineId,
        built_display_list: BuiltDisplayList,
    ) {
        self.display_lists.insert(pipeline_id, built_display_list);
    }
}

595
gfx/wrench/src/wrench.rs Normal file
View File

@ -0,0 +1,595 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use app_units::Au;
use blob;
use crossbeam::sync::chase_lev;
#[cfg(windows)]
use dwrote;
#[cfg(any(target_os = "linux", target_os = "macos"))]
use font_loader::system_fonts;
use glutin::WindowProxy;
use json_frame_writer::JsonFrameWriter;
use ron_frame_writer::RonFrameWriter;
use std::collections::HashMap;
use std::path::PathBuf;
use std::sync::{Arc, Mutex};
use time;
use webrender;
use webrender::api::*;
use webrender::{DebugFlags, RendererStats};
use yaml_frame_writer::YamlFrameWriterReceiver;
use {WindowWrapper, BLACK_COLOR, WHITE_COLOR};
// TODO(gw): This descriptor matches what we currently support for fonts
//           but is quite a mess. We should at least document and
//           use better types for things like the style and stretch.
#[derive(Debug, Clone, Hash, PartialEq, Eq)]
pub enum FontDescriptor {
    // A font file on disk plus the face index within it.
    Path { path: PathBuf, font_index: u32 },
    // A system font looked up by family name alone.
    Family { name: String },
    // A system font looked up by family plus numeric weight/style/stretch
    // (see the TODO above about the loose types).
    Properties {
        family: String,
        weight: u32,
        style: u32,
        stretch: u32,
    },
}
/// Output format used when recording API traffic for later inspection/replay.
pub enum SaveType {
    Yaml,
    Json,
    Ron,
    Binary,
}
/// State shared between the render notifier and the main loop.
struct NotifierData {
    // Used to wake the glutin event loop when a frame is ready.
    window_proxy: Option<WindowProxy>,
    // Counts notifications for the periodic latency report.
    frames_notified: u32,
    // Frame-start timestamps pushed by the main loop, stolen here.
    timing_receiver: chase_lev::Stealer<time::SteadyTime>,
    verbose: bool,
}
impl NotifierData {
    /// Bundles the state shared with the render notifier; the notified-frame
    /// counter always starts at zero.
    fn new(
        window_proxy: Option<WindowProxy>,
        timing_receiver: chase_lev::Stealer<time::SteadyTime>,
        verbose: bool,
    ) -> Self {
        Self {
            window_proxy,
            timing_receiver,
            verbose,
            frames_notified: 0,
        }
    }
}
struct Notifier(Arc<Mutex<NotifierData>>);
impl RenderNotifier for Notifier {
    fn clone(&self) -> Box<RenderNotifier> {
        Box::new(Notifier(self.0.clone()))
    }

    /// Called when a new frame is ready: records latency stats and wakes the
    /// event loop so the frame gets composited.
    fn wake_up(&self) {
        // lock() only errs on a poisoned mutex; unwrap is acceptable here.
        let mut data = self.0.lock();
        let data = data.as_mut().unwrap();
        match data.timing_receiver.steal() {
            chase_lev::Steal::Data(last_timing) => {
                data.frames_notified += 1;
                // In verbose mode, print the latency of every 600th frame.
                if data.verbose && data.frames_notified == 600 {
                    let elapsed = time::SteadyTime::now() - last_timing;
                    println!(
                        "frame latency (consider queue depth here): {:3.6} ms",
                        elapsed.num_microseconds().unwrap() as f64 / 1000.
                    );
                    data.frames_notified = 0;
                }
            }
            _ => {
                // Notification without a queued frame-start timestamp.
                println!("Notified of frame, but no frame was ready?");
            }
        }
        if let Some(ref window_proxy) = data.window_proxy {
            // Android has no glutin event loop to wake.
            #[cfg(not(target_os = "android"))]
            window_proxy.wakeup_event_loop();
        }
    }

    /// Scroll-only updates just need an event-loop wakeup; anything else is
    /// handled like a fresh frame.
    fn new_document_ready(&self, _: DocumentId, scrolled: bool, _composite_needed: bool) {
        if scrolled {
            let data = self.0.lock();
            if let Some(ref window_proxy) = data.unwrap().window_proxy {
                #[cfg(not(target_os = "android"))]
                window_proxy.wakeup_event_loop();
            }
        } else {
            self.wake_up();
        }
    }
}
/// Something wrench can step through frame by frame (YAML scenes, captures, ...).
pub trait WrenchThing {
    fn next_frame(&mut self);
    fn prev_frame(&mut self);
    // Builds/submits the current frame; the returned u32 is an
    // implementation-defined frame index.
    fn do_frame(&mut self, &mut Wrench) -> u32;
    // How many frames to queue ahead; defaults to none.
    fn queue_frames(&self) -> u32 {
        0
    }
}
impl WrenchThing for CapturedDocument {
    // Captures hold a single frame, so stepping is a no-op.
    fn next_frame(&mut self) {}
    fn prev_frame(&mut self) {}
    /// First call installs the captured root pipeline; later calls refresh.
    fn do_frame(&mut self, wrench: &mut Wrench) -> u32 {
        // take() makes the Some branch run exactly once.
        match self.root_pipeline_id.take() {
            Some(root_pipeline_id) => {
                // skip the first frame - to not overwrite the loaded one
                let mut txn = Transaction::new();
                txn.set_root_pipeline(root_pipeline_id);
                wrench.api.send_transaction(self.document_id, txn);
            }
            None => {
                wrench.refresh();
            }
        }
        0
    }
}
/// Central harness object: owns the renderer, the RenderApi and the document
/// that all wrench tools drive.
pub struct Wrench {
    window_size: DeviceUintSize,
    device_pixel_ratio: f32,
    page_zoom_factor: ZoomFactor,
    pub renderer: webrender::Renderer,
    pub api: RenderApi,
    pub document_id: DocumentId,
    pub root_pipeline_id: PipelineId,
    // Deferred title update, applied by the main loop.
    window_title_to_set: Option<String>,
    graphics_api: webrender::GraphicsApiInfo,
    pub rebuild_display_lists: bool,
    pub verbose: bool,
    // Frame-start timestamps consumed by the notifier's latency report.
    pub frame_start_sender: chase_lev::Worker<time::SteadyTime>,
    // Shared with the blob renderer so tests can observe blob requests.
    pub callbacks: Arc<Mutex<blob::BlobCallbacks>>,
}
impl Wrench {
/// Builds the renderer, RenderApi and document, wiring up the optional API
/// recorder, debug flags, the checkerboard blob renderer and the frame
/// notifier, then installs the root pipeline.
pub fn new(
    window: &mut WindowWrapper,
    shader_override_path: Option<PathBuf>,
    dp_ratio: f32,
    save_type: Option<SaveType>,
    size: DeviceUintSize,
    do_rebuild: bool,
    no_subpixel_aa: bool,
    debug: bool,
    verbose: bool,
    no_scissor: bool,
    no_batch: bool,
    precache_shaders: bool,
    disable_dual_source_blending: bool,
    zoom_factor: f32,
    notifier: Option<Box<RenderNotifier>>,
) -> Self {
    println!("Shader override path: {:?}", shader_override_path);

    // Optional API recorder, selected by output format.
    let recorder = save_type.map(|save_type| match save_type {
        SaveType::Yaml => Box::new(
            YamlFrameWriterReceiver::new(&PathBuf::from("yaml_frames")),
        ) as Box<webrender::ApiRecordingReceiver>,
        SaveType::Json => Box::new(JsonFrameWriter::new(&PathBuf::from("json_frames"))) as
            Box<webrender::ApiRecordingReceiver>,
        SaveType::Ron => Box::new(RonFrameWriter::new(&PathBuf::from("ron_frames"))) as
            Box<webrender::ApiRecordingReceiver>,
        SaveType::Binary => Box::new(webrender::BinaryRecorder::new(
            &PathBuf::from("wr-record.bin"),
        )) as Box<webrender::ApiRecordingReceiver>,
    });

    let mut debug_flags = DebugFlags::default();
    debug_flags.set(DebugFlags::DISABLE_BATCHING, no_batch);

    // Shared with the blob renderer so tests can observe blob requests.
    let callbacks = Arc::new(Mutex::new(blob::BlobCallbacks::new()));

    let opts = webrender::RendererOptions {
        device_pixel_ratio: dp_ratio,
        resource_override_path: shader_override_path,
        recorder,
        enable_subpixel_aa: !no_subpixel_aa,
        debug,
        debug_flags,
        enable_clear_scissor: !no_scissor,
        max_recorded_profiles: 16,
        precache_shaders,
        blob_image_renderer: Some(Box::new(blob::CheckerboardRenderer::new(callbacks.clone()))),
        disable_dual_source_blending,
        ..Default::default()
    };

    let proxy = window.create_window_proxy();
    // put an Awakened event into the queue to kick off the first frame
    if let Some(ref wp) = proxy {
        #[cfg(not(target_os = "android"))]
        wp.wakeup_event_loop();
    }

    // Work-stealing deque carrying frame-start timestamps: the sender stays
    // here, the stealer goes into the default notifier.
    let (timing_sender, timing_receiver) = chase_lev::deque();
    let notifier = notifier.unwrap_or_else(|| {
        let data = Arc::new(Mutex::new(NotifierData::new(proxy, timing_receiver, verbose)));
        Box::new(Notifier(data))
    });

    let (renderer, sender) = webrender::Renderer::new(window.clone_gl(), notifier, opts).unwrap();
    let api = sender.create_api();
    let document_id = api.add_document(size, 0);

    let graphics_api = renderer.get_graphics_api_info();
    let zoom_factor = ZoomFactor::new(zoom_factor);

    let mut wrench = Wrench {
        window_size: size,
        renderer,
        api,
        document_id,
        window_title_to_set: None,
        rebuild_display_lists: do_rebuild,
        verbose,
        device_pixel_ratio: dp_ratio,
        page_zoom_factor: zoom_factor,
        root_pipeline_id: PipelineId(0, 0),
        graphics_api,
        frame_start_sender: timing_sender,
        callbacks,
    };

    // Push the initial zoom/title, then install the root pipeline.
    wrench.set_page_zoom(zoom_factor);
    wrench.set_title("start");

    let mut txn = Transaction::new();
    txn.set_root_pipeline(wrench.root_pipeline_id);
    wrench.api.send_transaction(wrench.document_id, txn);

    wrench
}
pub fn get_page_zoom(&self) -> ZoomFactor {
self.page_zoom_factor
}
pub fn set_page_zoom(&mut self, zoom_factor: ZoomFactor) {
self.page_zoom_factor = zoom_factor;
let mut txn = Transaction::new();
txn.set_page_zoom(self.page_zoom_factor);
self.api.send_transaction(self.document_id, txn);
self.set_title("");
}
pub fn layout_simple_ascii(
&mut self,
font_key: FontKey,
instance_key: FontInstanceKey,
render_mode: Option<FontRenderMode>,
text: &str,
size: Au,
origin: LayerPoint,
flags: FontInstanceFlags,
) -> (Vec<u32>, Vec<LayerPoint>, LayoutRect) {
// Map the string codepoints to glyph indices in this font.
// Just drop any glyph that isn't present in this font.
let indices: Vec<u32> = self.api
.get_glyph_indices(font_key, text)
.iter()
.filter_map(|idx| *idx)
.collect();
let render_mode = render_mode.unwrap_or(<FontInstanceOptions as Default>::default().render_mode);
let subpx_dir = SubpixelDirection::Horizontal.limit_by(render_mode);
// Retrieve the metrics for each glyph.
let mut keys = Vec::new();
for glyph_index in &indices {
keys.push(GlyphKey::new(
*glyph_index,
LayerPoint::zero(),
render_mode,
subpx_dir,
));
}
let metrics = self.api.get_glyph_dimensions(instance_key, keys);
let mut bounding_rect = LayoutRect::zero();
let mut positions = Vec::new();
let mut cursor = origin;
let direction = if flags.contains(FontInstanceFlags::TRANSPOSE) {
LayerVector2D::new(
0.0,
if flags.contains(FontInstanceFlags::FLIP_Y) { -1.0 } else { 1.0 },
)
} else {
LayerVector2D::new(
if flags.contains(FontInstanceFlags::FLIP_X) { -1.0 } else { 1.0 },
0.0,
)
};
for metric in metrics {
positions.push(cursor);
match metric {
Some(metric) => {
let glyph_rect = LayoutRect::new(
LayoutPoint::new(cursor.x + metric.left as f32, cursor.y - metric.top as f32),
LayoutSize::new(metric.width as f32, metric.height as f32)
);
bounding_rect = bounding_rect.union(&glyph_rect);
cursor += direction * metric.advance;
}
None => {
// Extract the advances from the metrics. The get_glyph_dimensions API
// has a limitation that it can't currently get dimensions for non-renderable
// glyphs (e.g. spaces), so just use a rough estimate in that case.
let space_advance = size.to_f32_px() / 3.0;
cursor += direction * space_advance;
}
}
}
// The platform font implementations don't always handle
// the exact dimensions used when subpixel AA is enabled
// on glyphs. As a workaround, inflate the bounds by
// 2 pixels on either side, to give a slightly less
// tight fitting bounding rect.
let bounding_rect = bounding_rect.inflate(2.0, 2.0);
(indices, positions, bounding_rect)
}
pub fn set_title(&mut self, extra: &str) {
self.window_title_to_set = Some(format!(
"Wrench: {} ({}x zoom={}) - {} - {}",
extra,
self.device_pixel_ratio,
self.page_zoom_factor.get(),
self.graphics_api.renderer,
self.graphics_api.version
));
}
pub fn take_title(&mut self) -> Option<String> {
self.window_title_to_set.take()
}
pub fn should_rebuild_display_lists(&self) -> bool {
self.rebuild_display_lists
}
pub fn window_size_f32(&self) -> LayoutSize {
LayoutSize::new(
self.window_size.width as f32,
self.window_size.height as f32,
)
}
#[cfg(target_os = "windows")]
pub fn font_key_from_native_handle(&mut self, descriptor: &NativeFontHandle) -> FontKey {
let key = self.api.generate_font_key();
let mut resources = ResourceUpdates::new();
resources.add_native_font(key, descriptor.clone());
self.api.update_resources(resources);
key
}
#[cfg(target_os = "windows")]
pub fn font_key_from_name(&mut self, font_name: &str) -> FontKey {
let system_fc = dwrote::FontCollection::system();
let family = system_fc.get_font_family_by_name(font_name).unwrap();
let font = family.get_first_matching_font(
dwrote::FontWeight::Regular,
dwrote::FontStretch::Normal,
dwrote::FontStyle::Normal,
);
let descriptor = font.to_descriptor();
self.font_key_from_native_handle(&descriptor)
}
#[cfg(target_os = "windows")]
pub fn font_key_from_properties(
&mut self,
family: &str,
weight: u32,
style: u32,
stretch: u32,
) -> FontKey {
let weight = dwrote::FontWeight::from_u32(weight);
let style = dwrote::FontStyle::from_u32(style);
let stretch = dwrote::FontStretch::from_u32(stretch);
let desc = dwrote::FontDescriptor {
family_name: family.to_owned(),
weight,
style,
stretch,
};
self.font_key_from_native_handle(&desc)
}
#[cfg(any(target_os = "linux", target_os = "macos"))]
pub fn font_key_from_properties(
&mut self,
family: &str,
_weight: u32,
_style: u32,
_stretch: u32,
) -> FontKey {
let property = system_fonts::FontPropertyBuilder::new()
.family(family)
.build();
let (font, index) = system_fonts::get(&property).unwrap();
self.font_key_from_bytes(font, index as u32)
}
#[cfg(unix)]
pub fn font_key_from_name(&mut self, font_name: &str) -> FontKey {
let property = system_fonts::FontPropertyBuilder::new()
.family(font_name)
.build();
let (font, index) = system_fonts::get(&property).unwrap();
self.font_key_from_bytes(font, index as u32)
}
#[cfg(target_os = "android")]
pub fn font_key_from_name(&mut self, font_name: &str) -> FontKey {
unimplemented!()
}
pub fn font_key_from_bytes(&mut self, bytes: Vec<u8>, index: u32) -> FontKey {
let key = self.api.generate_font_key();
let mut update = ResourceUpdates::new();
update.add_raw_font(key, bytes, index);
self.api.update_resources(update);
key
}
pub fn add_font_instance(&mut self,
font_key: FontKey,
size: Au,
flags: FontInstanceFlags,
render_mode: Option<FontRenderMode>,
) -> FontInstanceKey {
let key = self.api.generate_font_instance_key();
let mut update = ResourceUpdates::new();
let mut options: FontInstanceOptions = Default::default();
options.flags |= flags;
if let Some(render_mode) = render_mode {
options.render_mode = render_mode;
}
update.add_font_instance(key, font_key, size, Some(options), None, Vec::new());
self.api.update_resources(update);
key
}
#[allow(dead_code)]
pub fn delete_font_instance(&mut self, key: FontInstanceKey) {
let mut update = ResourceUpdates::new();
update.delete_font_instance(key);
self.api.update_resources(update);
}
pub fn update(&mut self, dim: DeviceUintSize) {
if dim != self.window_size {
self.window_size = dim;
}
}
pub fn begin_frame(&mut self) {
self.frame_start_sender.push(time::SteadyTime::now());
}
pub fn send_lists(
&mut self,
frame_number: u32,
display_lists: Vec<(PipelineId, LayerSize, BuiltDisplayList)>,
scroll_offsets: &HashMap<ClipId, LayerPoint>,
) {
let root_background_color = Some(ColorF::new(1.0, 1.0, 1.0, 1.0));
let mut txn = Transaction::new();
for display_list in display_lists {
txn.set_display_list(
Epoch(frame_number),
root_background_color,
self.window_size_f32(),
display_list,
false,
);
}
// TODO(nical) - Need to separate the set_display_list from the scrolling
// operations into separate transactions for mysterious -but probably related
// to the other comment below- reasons.
self.api.send_transaction(self.document_id, txn);
let mut txn = Transaction::new();
for (id, offset) in scroll_offsets {
txn.scroll_node_with_id(
*offset,
*id,
ScrollClamping::NoClamping,
);
}
// TODO(nical) - Wrench does not notify frames when there was scrolling
// in the transaction (See RenderNotifier implementations). If we don't
// generate a frame after scrolling, wrench just stops and some tests
// will time out.
// I suppose this was to avoid taking the snapshot after scrolling if
// there was other updates coming in a subsequent messages but it's very
// error-prone with transactions.
// For now just send two transactions to avoid the deadlock, but we should
// figure this out.
self.api.send_transaction(self.document_id, txn);
let mut txn = Transaction::new();
txn.generate_frame();
self.api.send_transaction(self.document_id, txn);
}
pub fn get_frame_profiles(
&mut self,
) -> (Vec<webrender::CpuProfile>, Vec<webrender::GpuProfile>) {
self.renderer.get_frame_profiles()
}
pub fn render(&mut self) -> RendererStats {
self.renderer.update();
self.renderer
.render(self.window_size)
.expect("errors encountered during render!")
}
pub fn refresh(&mut self) {
self.begin_frame();
let mut txn = Transaction::new();
txn.generate_frame();
self.api.send_transaction(self.document_id, txn);
}
pub fn show_onscreen_help(&mut self) {
let help_lines = [
"Esc - Quit",
"H - Toggle help",
"R - Toggle recreating display items each frame",
"P - Toggle profiler",
"O - Toggle showing intermediate targets",
"I - Toggle showing texture caches",
"B - Toggle showing alpha primitive rects",
"S - Toggle compact profiler",
"Q - Toggle GPU queries for time and samples",
"M - Trigger memory pressure event",
"T - Save CPU profile to a file",
"C - Save a capture to captures/wrench/",
];
let color_and_offset = [(*BLACK_COLOR, 2.0), (*WHITE_COLOR, 0.0)];
let dr = self.renderer.debug_renderer();
for ref co in &color_and_offset {
let x = self.device_pixel_ratio * (15.0 + co.1);
let mut y = self.device_pixel_ratio * (15.0 + co.1 + dr.line_height());
for ref line in &help_lines {
dr.add_text(x, y, line, co.0.into());
y += self.device_pixel_ratio * dr.line_height();
}
}
}
}

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

View File

@ -0,0 +1,577 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use app_units::Au;
use euclid::{Angle, TypedSize2D};
use parse_function::parse_function;
use std::f32;
use std::str::FromStr;
use webrender::api::*;
use yaml_rust::{Yaml, YamlLoader};
/// Accessors that convert YAML values into the webrender / euclid /
/// app_units types used by the frame reader. Each returns `None` (or a
/// zero value, where noted on the impl) when the YAML node does not have
/// the expected shape.
pub trait YamlHelper {
    // Scalars.
    fn as_f32(&self) -> Option<f32>;
    fn as_force_f32(&self) -> Option<f32>;
    // Homogeneous sequences.
    fn as_vec_f32(&self) -> Option<Vec<f32>>;
    fn as_vec_u32(&self) -> Option<Vec<u32>>;
    fn as_vec_u64(&self) -> Option<Vec<u64>>;
    fn as_pipeline_id(&self) -> Option<PipelineId>;
    // Geometry.
    fn as_rect(&self) -> Option<LayoutRect>;
    fn as_size(&self) -> Option<LayoutSize>;
    fn as_point(&self) -> Option<LayoutPoint>;
    fn as_vector(&self) -> Option<LayoutVector2D>;
    fn as_matrix4d(&self) -> Option<LayoutTransform>;
    fn as_transform(&self, transform_origin: &LayoutPoint) -> Option<LayoutTransform>;
    // Colors.
    fn as_colorf(&self) -> Option<ColorF>;
    fn as_vec_colorf(&self) -> Option<Vec<ColorF>>;
    // Lengths (converted to app units).
    fn as_px_to_au(&self) -> Option<Au>;
    fn as_pt_to_au(&self) -> Option<Au>;
    fn as_vec_string(&self) -> Option<Vec<String>>;
    // Border radii.
    fn as_border_radius_component(&self) -> LayoutSize;
    fn as_border_radius(&self) -> Option<BorderRadius>;
    // String-keyed enums and filters.
    fn as_transform_style(&self) -> Option<TransformStyle>;
    fn as_clip_mode(&self) -> Option<ClipMode>;
    fn as_mix_blend_mode(&self) -> Option<MixBlendMode>;
    fn as_scroll_policy(&self) -> Option<ScrollPolicy>;
    fn as_filter_op(&self) -> Option<FilterOp>;
    fn as_vec_filter_op(&self) -> Option<Vec<FilterOp>>;
}
/// Parses a color from either a known color keyword or a string of 3-4
/// whitespace-separated numbers ("R G B" or "R G B A"), with channels in
/// 0-255 and alpha in 0.0-1.0.
///
/// Returns `None` for unrecognized names, non-numeric components, or a
/// component count other than 3 or 4. (Previously an unrecognized name
/// containing non-numeric tokens panicked on `unwrap`; it now yields
/// `None` so malformed YAML fails gracefully.)
fn string_to_color(color: &str) -> Option<ColorF> {
    match color {
        "red" => Some(ColorF::new(1.0, 0.0, 0.0, 1.0)),
        "green" => Some(ColorF::new(0.0, 1.0, 0.0, 1.0)),
        "blue" => Some(ColorF::new(0.0, 0.0, 1.0, 1.0)),
        "white" => Some(ColorF::new(1.0, 1.0, 1.0, 1.0)),
        "black" => Some(ColorF::new(0.0, 0.0, 0.0, 1.0)),
        "yellow" => Some(ColorF::new(1.0, 1.0, 0.0, 1.0)),
        s => {
            // Numeric "R G B [A]" form; bail out on any unparsable token.
            let items = s.split_whitespace()
                .map(f32::from_str)
                .collect::<Result<Vec<f32>, _>>()
                .ok()?;
            match items.len() {
                3 => Some(ColorF::new(
                    items[0] / 255.0,
                    items[1] / 255.0,
                    items[2] / 255.0,
                    1.0,
                )),
                4 => Some(ColorF::new(
                    items[0] / 255.0,
                    items[1] / 255.0,
                    items[2] / 255.0,
                    items[3],
                )),
                _ => None,
            }
        }
    }
}
/// Two-way mapping between an enum and its canonical string form.
pub trait StringEnum: Sized {
    /// Parses the string form; `None` if the text is not recognized.
    fn from_str(&str) -> Option<Self>;
    /// Returns the canonical string form of this value.
    fn as_str(&self) -> &'static str;
}
/// Implements `StringEnum` for `$T`, mapping each variant `$y` to the
/// string literal `$x` in both directions. An unrecognized string is
/// reported on stdout and yields `None`.
macro_rules! define_string_enum {
    ($T:ident, [ $( $y:ident = $x:expr ),* ]) => {
        impl StringEnum for $T {
            fn from_str(text: &str) -> Option<$T> {
                match text {
                    $( $x => Some($T::$y), )*
                    _ => {
                        println!("Unrecognized {} value '{}'", stringify!($T), text);
                        None
                    }
                }
            }
            fn as_str(&self) -> &'static str {
                match *self {
                    $( $T::$y => $x, )*
                }
            }
        }
    }
}
// String spellings accepted in YAML for the corresponding webrender enums.
define_string_enum!(TransformStyle, [Flat = "flat", Preserve3D = "preserve-3d"]);

define_string_enum!(
    MixBlendMode,
    [
        Normal = "normal",
        Multiply = "multiply",
        Screen = "screen",
        Overlay = "overlay",
        Darken = "darken",
        Lighten = "lighten",
        ColorDodge = "color-dodge",
        ColorBurn = "color-burn",
        HardLight = "hard-light",
        SoftLight = "soft-light",
        Difference = "difference",
        Exclusion = "exclusion",
        Hue = "hue",
        Saturation = "saturation",
        Color = "color",
        Luminosity = "luminosity"
    ]
);

define_string_enum!(ScrollPolicy, [Scrollable = "scrollable", Fixed = "fixed"]);

define_string_enum!(
    LineOrientation,
    [Horizontal = "horizontal", Vertical = "vertical"]
);

define_string_enum!(
    LineStyle,
    [
        Solid = "solid",
        Dotted = "dotted",
        Dashed = "dashed",
        Wavy = "wavy"
    ]
);

define_string_enum!(ClipMode, [Clip = "clip", ClipOut = "clip-out"]);
// Build a rotation of `degrees` around the (axis_x, axis_y, axis_z) axis,
// applied about `origin` (translate to origin, rotate, translate back).
// NOTE(review): the angle is negated (2π − θ), presumably to flip the
// rotation direction relative to euclid's convention — confirm.
fn make_rotation(
    origin: &LayoutPoint,
    degrees: f32,
    axis_x: f32,
    axis_y: f32,
    axis_z: f32,
) -> LayoutTransform {
    let theta = 2.0f32 * f32::consts::PI - degrees.to_radians();
    let rotation =
        LayoutTransform::identity().pre_rotate(axis_x, axis_y, axis_z, Angle::radians(theta));
    let to_origin = LayoutTransform::create_translation(origin.x, origin.y, 0.0);
    let from_origin = LayoutTransform::create_translation(-origin.x, -origin.y, -0.0);
    to_origin.pre_mul(&rotation).pre_mul(&from_origin)
}
// Build a 2D skew transform; both angles are given in degrees.
fn make_skew(
    skew_x: f32,
    skew_y: f32,
) -> LayoutTransform {
    LayoutTransform::create_skew(
        Angle::radians(skew_x.to_radians()),
        Angle::radians(skew_y.to_radians()),
    )
}
impl YamlHelper for Yaml {
    /// Numeric scalar (integer or real) as an f32.
    fn as_f32(&self) -> Option<f32> {
        match *self {
            Yaml::Integer(iv) => Some(iv as f32),
            Yaml::Real(ref sv) => f32::from_str(sv.as_str()).ok(),
            _ => None,
        }
    }

    /// Like `as_f32`, but additionally parses plain strings as floats.
    fn as_force_f32(&self) -> Option<f32> {
        match *self {
            Yaml::Integer(iv) => Some(iv as f32),
            Yaml::String(ref sv) | Yaml::Real(ref sv) => f32::from_str(sv.as_str()).ok(),
            _ => None,
        }
    }

    /// Floats from a whitespace-separated string, a YAML array of numbers
    /// or numeric strings, or a single integer.
    fn as_vec_f32(&self) -> Option<Vec<f32>> {
        match *self {
            Yaml::String(ref s) | Yaml::Real(ref s) => s.split_whitespace()
                .map(|v| f32::from_str(v))
                .collect::<Result<Vec<_>, _>>()
                .ok(),
            Yaml::Array(ref v) => v.iter()
                .map(|v| match *v {
                    Yaml::Integer(k) => Ok(k as f32),
                    Yaml::String(ref k) | Yaml::Real(ref k) => f32::from_str(k).map_err(|_| false),
                    _ => Err(false),
                })
                .collect::<Result<Vec<_>, _>>()
                .ok(),
            Yaml::Integer(k) => Some(vec![k as f32]),
            _ => None,
        }
    }

    /// Array of integers as u32s. Panics if an element is not an integer.
    fn as_vec_u32(&self) -> Option<Vec<u32>> {
        if let Some(v) = self.as_vec() {
            Some(v.iter().map(|v| v.as_i64().unwrap() as u32).collect())
        } else {
            None
        }
    }

    /// Array of integers as u64s. Panics if an element is not an integer.
    fn as_vec_u64(&self) -> Option<Vec<u64>> {
        if let Some(v) = self.as_vec() {
            Some(v.iter().map(|v| v.as_i64().unwrap() as u64).collect())
        } else {
            None
        }
    }

    /// A two-element integer array as a `PipelineId`.
    fn as_pipeline_id(&self) -> Option<PipelineId> {
        if let Some(v) = self.as_vec() {
            let a = v.get(0).and_then(|v| v.as_i64()).map(|v| v as u32);
            let b = v.get(1).and_then(|v| v.as_i64()).map(|v| v as u32);
            match (a, b) {
                (Some(a), Some(b)) if v.len() == 2 => Some(PipelineId(a, b)),
                _ => None,
            }
        } else {
            None
        }
    }

    /// Numeric value interpreted as CSS px, converted to app units.
    fn as_px_to_au(&self) -> Option<Au> {
        match self.as_force_f32() {
            Some(fv) => Some(Au::from_f32_px(fv)),
            None => None,
        }
    }

    /// Numeric value interpreted as points, converted to app units
    /// (12 pt == 16 px, i.e. 1 pt == 4/3 px).
    fn as_pt_to_au(&self) -> Option<Au> {
        match self.as_force_f32() {
            Some(fv) => Some(Au::from_f32_px(fv * 16. / 12.)),
            None => None,
        }
    }

    /// Four floats `[x, y, w, h]` as a rect.
    fn as_rect(&self) -> Option<LayoutRect> {
        if self.is_badvalue() {
            return None;
        }
        if let Some(nums) = self.as_vec_f32() {
            if nums.len() == 4 {
                return Some(LayoutRect::new(
                    LayoutPoint::new(nums[0], nums[1]),
                    LayoutSize::new(nums[2], nums[3]),
                ));
            }
        }
        None
    }

    /// Two floats `[w, h]` as a size.
    fn as_size(&self) -> Option<LayoutSize> {
        if self.is_badvalue() {
            return None;
        }
        if let Some(nums) = self.as_vec_f32() {
            if nums.len() == 2 {
                return Some(LayoutSize::new(nums[0], nums[1]));
            }
        }
        None
    }

    /// Two floats `[x, y]` as a point.
    fn as_point(&self) -> Option<LayoutPoint> {
        if self.is_badvalue() {
            return None;
        }
        if let Some(nums) = self.as_vec_f32() {
            if nums.len() == 2 {
                return Some(LayoutPoint::new(nums[0], nums[1]));
            }
        }
        None
    }

    /// Two floats `[x, y]` as a vector.
    fn as_vector(&self) -> Option<LayoutVector2D> {
        self.as_point().map(|p| p.to_vector())
    }

    /// Sixteen floats as a row-major 4x4 matrix. Panics if a float sequence
    /// is present but has any other length.
    fn as_matrix4d(&self) -> Option<LayoutTransform> {
        if let Some(nums) = self.as_vec_f32() {
            assert_eq!(nums.len(), 16, "expected 16 floats, got '{:?}'", self);
            Some(LayoutTransform::row_major(
                nums[0],
                nums[1],
                nums[2],
                nums[3],
                nums[4],
                nums[5],
                nums[6],
                nums[7],
                nums[8],
                nums[9],
                nums[10],
                nums[11],
                nums[12],
                nums[13],
                nums[14],
                nums[15],
            ))
        } else {
            None
        }
    }

    /// A transform, given as a raw 16-float matrix, a CSS-like function
    /// string ("translate(..) rotate(..) ..."), or an array of such values
    /// (combined via `pre_mul`). Rotations are applied about
    /// `transform_origin`. Unknown function names stop the parse; malformed
    /// numeric arguments panic.
    fn as_transform(&self, transform_origin: &LayoutPoint) -> Option<LayoutTransform> {
        if let Some(transform) = self.as_matrix4d() {
            return Some(transform);
        }

        match *self {
            Yaml::String(ref string) => {
                let mut slice = string.as_str();
                let mut transform = LayoutTransform::identity();
                // Consume one function call per iteration until the string
                // is exhausted or an unknown function is hit.
                while !slice.is_empty() {
                    let (function, ref args, reminder) = parse_function(slice);
                    slice = reminder;
                    let mx = match function {
                        "translate" if args.len() >= 2 => {
                            // Optional third argument is the z translation.
                            let z = args.get(2).and_then(|a| a.parse().ok()).unwrap_or(0.);
                            LayoutTransform::create_translation(
                                args[0].parse().unwrap(),
                                args[1].parse().unwrap(),
                                z,
                            )
                        }
                        "rotate" | "rotate-z" if args.len() == 1 => {
                            make_rotation(transform_origin, args[0].parse().unwrap(), 0.0, 0.0, 1.0)
                        }
                        "rotate-x" if args.len() == 1 => {
                            make_rotation(transform_origin, args[0].parse().unwrap(), 1.0, 0.0, 0.0)
                        }
                        "rotate-y" if args.len() == 1 => {
                            make_rotation(transform_origin, args[0].parse().unwrap(), 0.0, 1.0, 0.0)
                        }
                        "scale" if args.len() >= 1 => {
                            let x = args[0].parse().unwrap();
                            // Default to uniform X/Y scale if Y unspecified.
                            let y = args.get(1).and_then(|a| a.parse().ok()).unwrap_or(x);
                            // Default to no Z scale if unspecified.
                            let z = args.get(2).and_then(|a| a.parse().ok()).unwrap_or(1.0);
                            LayoutTransform::create_scale(x, y, z)
                        }
                        "scale-x" if args.len() == 1 => {
                            LayoutTransform::create_scale(args[0].parse().unwrap(), 1.0, 1.0)
                        }
                        "scale-y" if args.len() == 1 => {
                            LayoutTransform::create_scale(1.0, args[0].parse().unwrap(), 1.0)
                        }
                        "scale-z" if args.len() == 1 => {
                            LayoutTransform::create_scale(1.0, 1.0, args[0].parse().unwrap())
                        }
                        "skew" if args.len() >= 1 => {
                            // Default to no Y skew if unspecified.
                            let skew_y = args.get(1).and_then(|a| a.parse().ok()).unwrap_or(0.0);
                            make_skew(args[0].parse().unwrap(), skew_y)
                        }
                        "skew-x" if args.len() == 1 => {
                            make_skew(args[0].parse().unwrap(), 0.0)
                        }
                        "skew-y" if args.len() == 1 => {
                            make_skew(0.0, args[0].parse().unwrap())
                        }
                        "perspective" if args.len() == 1 => {
                            LayoutTransform::create_perspective(args[0].parse().unwrap())
                        }
                        _ => {
                            println!("unknown function {}", function);
                            break;
                        }
                    };
                    transform = transform.post_mul(&mx);
                }
                Some(transform)
            }
            Yaml::Array(ref array) => {
                // Fold the element transforms together; elements that fail
                // to parse are silently skipped.
                let transform = array.iter().fold(
                    LayoutTransform::identity(),
                    |u, yaml| match yaml.as_transform(transform_origin) {
                        Some(ref transform) => u.pre_mul(transform),
                        None => u,
                    },
                );
                Some(transform)
            }
            Yaml::BadValue => None,
            _ => {
                println!("unknown transform {:?}", self);
                None
            }
        }
    }

    /// A color: either 3-4 floats (RGB in 0-255, alpha in 0-1, defaulting
    /// to opaque) or a string handled by `string_to_color`. Panics if a
    /// float sequence has any other length.
    fn as_colorf(&self) -> Option<ColorF> {
        if let Some(mut nums) = self.as_vec_f32() {
            assert!(
                nums.len() == 3 || nums.len() == 4,
                "color expected a color name, or 3-4 floats; got '{:?}'",
                self
            );

            if nums.len() == 3 {
                nums.push(1.0);
            }
            return Some(ColorF::new(
                nums[0] / 255.0,
                nums[1] / 255.0,
                nums[2] / 255.0,
                nums[3],
            ));
        }

        if let Some(s) = self.as_str() {
            string_to_color(s)
        } else {
            None
        }
    }

    /// One color or an array of colors. Panics on unparsable elements.
    fn as_vec_colorf(&self) -> Option<Vec<ColorF>> {
        if let Some(v) = self.as_vec() {
            Some(v.iter().map(|v| v.as_colorf().unwrap()).collect())
        } else if let Some(color) = self.as_colorf() {
            Some(vec![color])
        } else {
            None
        }
    }

    /// One string or an array of strings. Panics on non-string elements.
    fn as_vec_string(&self) -> Option<Vec<String>> {
        if let Some(v) = self.as_vec() {
            Some(v.iter().map(|v| v.as_str().unwrap().to_owned()).collect())
        } else if let Some(s) = self.as_str() {
            Some(vec![s.to_owned()])
        } else {
            None
        }
    }

    /// One corner of a border radius: a single integer means a circular
    /// corner of that radius; otherwise a `[w, h]` size, defaulting to zero.
    fn as_border_radius_component(&self) -> LayoutSize {
        if let Yaml::Integer(integer) = *self {
            return LayoutSize::new(integer as f32, integer as f32);
        }
        self.as_size().unwrap_or(TypedSize2D::zero())
    }

    /// A border radius: a `[w, h]` size or single number (uniform), a
    /// 4-element per-corner array, or a hash keyed by `top-left`,
    /// `top-right`, `bottom-left`, `bottom-right`. Panics on other shapes.
    fn as_border_radius(&self) -> Option<BorderRadius> {
        if let Some(size) = self.as_size() {
            return Some(BorderRadius::uniform_size(size));
        }

        match *self {
            Yaml::BadValue => None,
            Yaml::String(ref s) | Yaml::Real(ref s) => {
                let fv = f32::from_str(s).unwrap();
                Some(BorderRadius::uniform(fv))
            }
            Yaml::Integer(v) => Some(BorderRadius::uniform(v as f32)),
            Yaml::Array(ref array) if array.len() == 4 => {
                let top_left = array[0].as_border_radius_component();
                let top_right = array[1].as_border_radius_component();
                let bottom_left = array[2].as_border_radius_component();
                let bottom_right = array[3].as_border_radius_component();
                Some(BorderRadius {
                    top_left,
                    top_right,
                    bottom_left,
                    bottom_right,
                })
            }
            Yaml::Hash(_) => {
                let top_left = self["top-left"].as_border_radius_component();
                let top_right = self["top-right"].as_border_radius_component();
                let bottom_left = self["bottom-left"].as_border_radius_component();
                let bottom_right = self["bottom-right"].as_border_radius_component();
                Some(BorderRadius {
                    top_left,
                    top_right,
                    bottom_left,
                    bottom_right,
                })
            }
            _ => {
                panic!("Invalid border radius specified: {:?}", self);
            }
        }
    }

    // The following four all go through the `StringEnum` mappings declared
    // with `define_string_enum!` above.
    fn as_transform_style(&self) -> Option<TransformStyle> {
        self.as_str().and_then(|x| StringEnum::from_str(x))
    }

    fn as_mix_blend_mode(&self) -> Option<MixBlendMode> {
        self.as_str().and_then(|x| StringEnum::from_str(x))
    }

    fn as_scroll_policy(&self) -> Option<ScrollPolicy> {
        self.as_str().and_then(|x| StringEnum::from_str(x))
    }

    fn as_clip_mode(&self) -> Option<ClipMode> {
        self.as_str().and_then(|x| StringEnum::from_str(x))
    }

    /// A filter, as a CSS-like function string, e.g. "blur(3)",
    /// "opacity(0.5)", or "drop-shadow([2 2] 4 red)". Panics on malformed
    /// numeric arguments; returns `None` for unknown function names.
    fn as_filter_op(&self) -> Option<FilterOp> {
        if let Some(s) = self.as_str() {
            match parse_function(s) {
                ("blur", ref args, _) if args.len() == 1 => {
                    Some(FilterOp::Blur(args[0].parse().unwrap()))
                }
                ("brightness", ref args, _) if args.len() == 1 => {
                    Some(FilterOp::Brightness(args[0].parse().unwrap()))
                }
                ("contrast", ref args, _) if args.len() == 1 => {
                    Some(FilterOp::Contrast(args[0].parse().unwrap()))
                }
                ("grayscale", ref args, _) if args.len() == 1 => {
                    Some(FilterOp::Grayscale(args[0].parse().unwrap()))
                }
                ("hue-rotate", ref args, _) if args.len() == 1 => {
                    Some(FilterOp::HueRotate(args[0].parse().unwrap()))
                }
                ("invert", ref args, _) if args.len() == 1 => {
                    Some(FilterOp::Invert(args[0].parse().unwrap()))
                }
                ("opacity", ref args, _) if args.len() == 1 => {
                    let amount: f32 = args[0].parse().unwrap();
                    Some(FilterOp::Opacity(amount.into(), amount))
                }
                ("saturate", ref args, _) if args.len() == 1 => {
                    Some(FilterOp::Saturate(args[0].parse().unwrap()))
                }
                ("sepia", ref args, _) if args.len() == 1 => {
                    Some(FilterOp::Sepia(args[0].parse().unwrap()))
                }
                ("drop-shadow", ref args, _) if args.len() == 3 => {
                    // Re-parse the three arguments through a small YAML doc
                    // so the existing vector/float/color accessors apply.
                    let str = format!("---\noffset: {}\nblur-radius: {}\ncolor: {}\n", args[0], args[1], args[2]);
                    let mut yaml_doc = YamlLoader::load_from_str(&str).expect("Failed to parse drop-shadow");
                    let yaml = yaml_doc.pop().unwrap();

                    Some(FilterOp::DropShadow(yaml["offset"].as_vector().unwrap(),
                                              yaml["blur-radius"].as_f32().unwrap(),
                                              yaml["color"].as_colorf().unwrap()))
                }
                (_, _, _) => None,
            }
        } else {
            None
        }
    }

    /// One filter or an array of filters. Panics on unparsable elements.
    fn as_vec_filter_op(&self) -> Option<Vec<FilterOp>> {
        if let Some(v) = self.as_vec() {
            Some(v.iter().map(|x| x.as_filter_op().unwrap()).collect())
        } else {
            self.as_filter_op().map(|op| vec![op])
        }
    }
}

View File

@ -419,10 +419,8 @@ SVGDocumentWrapper::UnregisterForXPCOMShutdown()
void
SVGDocumentWrapper::FlushLayout()
{
nsCOMPtr<nsIPresShell> presShell;
mViewer->GetPresShell(getter_AddRefs(presShell));
if (presShell) {
presShell->FlushPendingNotifications(FlushType::Layout);
if (nsIDocument* doc = GetDocument()) {
doc->FlushPendingNotifications(FlushType::Layout);
}
}

View File

@ -1587,6 +1587,28 @@ CollectDocuments(nsIDocument* aDocument, void* aDocArray)
return true;
}
void
nsRefreshDriver::UpdateIntersectionObservations()
{
AutoTArray<nsCOMPtr<nsIDocument>, 32> documents;
if (mPresContext->Document()->HasIntersectionObservers()) {
documents.AppendElement(mPresContext->Document());
}
mPresContext->Document()->CollectDescendantDocuments(
documents,
[](const nsIDocument* document) -> bool {
return document->HasIntersectionObservers();
});
for (uint32_t i = 0; i < documents.Length(); ++i) {
nsIDocument* doc = documents[i];
doc->UpdateIntersectionObservations();
doc->ScheduleIntersectionObserverNotification();
}
}
void
nsRefreshDriver::DispatchAnimationEvents()
{
@ -1955,13 +1977,7 @@ nsRefreshDriver::Tick(int64_t aNowEpoch, TimeStamp aNowTime)
}
#endif
AutoTArray<nsCOMPtr<nsIDocument>, 32> documents;
CollectDocuments(mPresContext->Document(), &documents);
for (uint32_t i = 0; i < documents.Length(); ++i) {
nsIDocument* doc = documents[i];
doc->UpdateIntersectionObservations();
doc->ScheduleIntersectionObserverNotification();
}
UpdateIntersectionObservations();
/*
* Perform notification to imgIRequests subscribed to listen

View File

@ -385,6 +385,7 @@ private:
void DispatchPendingEvents();
void DispatchAnimationEvents();
void RunFrameRequestCallbacks(mozilla::TimeStamp aNowTime);
void UpdateIntersectionObservations();
void Tick(int64_t aNowEpoch, mozilla::TimeStamp aNowTime);
enum EnsureTimerStartedFlags {

View File

@ -6358,6 +6358,10 @@ ContainerState::CreateMaskLayer(Layer *aLayer,
gfx::Rect boundingRect = CalculateBounds(newData.mRoundedClipRects,
newData.mAppUnitsPerDevPixel);
boundingRect.Scale(mParameters.mXScale, mParameters.mYScale);
if (boundingRect.IsEmpty()) {
// Return early if we know that there is effectively no visible data.
return nullptr;
}
uint32_t maxSize = mManager->GetMaxTextureSize();
NS_ASSERTION(maxSize > 0, "Invalid max texture size");

View File

@ -109,8 +109,10 @@ SERVO_BINDING_FUNC(Servo_StyleSet_InsertStyleSheetBefore, void,
RawServoStyleSetBorrowed set,
const mozilla::ServoStyleSheet* gecko_sheet,
const mozilla::ServoStyleSheet* before)
SERVO_BINDING_FUNC(Servo_StyleSet_FlushStyleSheets, void, RawServoStyleSetBorrowed set,
RawGeckoElementBorrowedOrNull doc_elem)
SERVO_BINDING_FUNC(Servo_StyleSet_FlushStyleSheets, void,
RawServoStyleSetBorrowed set,
RawGeckoElementBorrowedOrNull doc_elem,
const mozilla::ServoElementSnapshotTable* snapshots)
SERVO_BINDING_FUNC(Servo_StyleSet_NoteStyleSheetsChanged, void,
RawServoStyleSetBorrowed set,
bool author_style_disabled,

View File

@ -1435,7 +1435,11 @@ ServoStyleSet::UpdateStylist()
// since they are loaded and unloaded synchronously, and they don't have to
// deal with dynamic content changes.
Element* root = IsMaster() ? mDocument->GetRootElement() : nullptr;
Servo_StyleSet_FlushStyleSheets(mRawSet.get(), root);
const ServoElementSnapshotTable* snapshots = nullptr;
if (nsPresContext* pc = GetPresContext()) {
snapshots = &pc->RestyleManager()->AsServo()->Snapshots();
}
Servo_StyleSet_FlushStyleSheets(mRawSet.get(), root, snapshots);
}
if (MOZ_UNLIKELY(mStylistState & StylistState::XBLStyleSheetsDirty)) {

View File

@ -830,6 +830,8 @@ class TransportTest : public MtransportTest {
TransportTest() {
fds_[0] = nullptr;
fds_[1] = nullptr;
p1_ = nullptr;
p2_ = nullptr;
}
void TearDown() override {
@ -858,6 +860,12 @@ class TransportTest : public MtransportTest {
}
void Reset() {
if (p1_) {
delete p1_;
}
if (p2_) {
delete p2_;
}
p1_ = new TransportTestPeer(target_, "P1", test_utils_);
p2_ = new TransportTestPeer(target_, "P2", test_utils_);
}

View File

@ -26,7 +26,7 @@ Telemetry extras sent for a successful content download might look like this:
"content": "25610abb-5dc8-fd75-40e7-990507f010c4"
}
For failed content downloads an additional ``error`` field contains the error type that occured when downloading the content. The value can be one of:
For failed content downloads an additional ``error`` field contains the error type that occurred when downloading the content. The value can be one of:
- no_network
- network_metered

View File

@ -125,7 +125,7 @@ registration due to inactivity or an unexpected server event. Each
`PushSubscription` is associated with a given *uaid* and corresponds to a unique
(per-*uaid*) *chid* (Channel ID) on the autopush server. An individual *chid*
is potentially long-lived, but clients must expect the service to expire *chid*s
as part of regular maintainence. The `PushManager` uses an `AutopushClient`
as part of regular maintenance. The `PushManager` uses an `AutopushClient`
instance to interact with the autopush server.
Between the `PushManager`, the `PushManagerStorage`, and assorted GCM event

View File

@ -27,7 +27,7 @@ shutdown as well, so as to
1) provide an immediate visual feedback to the user that Firefox is indeed quitting
2) avoid a state where the UI is still running "normally" while the rendering engine is already
shutting down, which could lead to loosing incoming external tabs if they were to arrive within
shutting down, which could lead to losing incoming external tabs if they were to arrive within
that period.
Therefore, shutdown of the native UI was originally started simultaneously with notifying Gecko.

View File

@ -206,6 +206,11 @@ pref("dom.gamepad.haptic_feedback.enabled", true);
// even if this is true).
pref("dom.keyboardevent.dispatch_during_composition", false);
// If this is true, TextEventDispatcher dispatches keypress event with setting
// WidgetEvent::mFlags::mOnlySystemGroupDispatchInContent to true if it won't
// cause inputting printable character.
pref("dom.keyboardevent.keypress.dispatch_non_printable_keys_only_system_group_in_content", false);
// Whether to run add-on code in different compartments from browser code. This
// causes a separate compartment for each (addon, global) combination, which may
// significantly increase the number of compartments in the system.

View File

@ -510,7 +510,7 @@ def generate_gn_config(config, srcdir, output, non_unified_sources, gn_binary,
gn_args = '--args=%s' % ' '.join(['%s=%s' % (k, str_for_arg(v)) for k, v
in input_variables.iteritems()])
gn_arg_string = '_'.join([str(input_variables[k]) for k in sorted(input_variables.keys())])
out_dir = mozpath.join(config.topobjdir, 'gn-output')
out_dir = mozpath.join(output, 'gn-output')
gen_args = [
config.substs['GN'], 'gen', out_dir, gn_args, '--ide=json',
]
@ -560,7 +560,7 @@ class GnMozbuildWriterBackend(BuildBackend):
# Check the objdir for a gn-config in to aide debugging in cases
# someone is running both steps on the same machine and want to
# sanity check moz.build generation for a particular config.
gn_config_files = glob.glob(mozpath.join(obj.topobjdir,
gn_config_files = glob.glob(mozpath.join(obj.objdir, obj.target_dir,
'gn-output', '*.json'))
if gn_config_files:
print("Writing moz.build files based on the following gn configs: %s" %

View File

@ -8,13 +8,11 @@
<!ENTITY certmgr.tab.others2 "People">
<!ENTITY certmgr.tab.websites3 "Servers">
<!ENTITY certmgr.tab.ca "Authorities">
<!ENTITY certmgr.tab.orphan2 "Others">
<!ENTITY certmgr.mine2 "You have certificates from these organizations that identify you">
<!ENTITY certmgr.others2 "You have certificates on file that identify these people">
<!ENTITY certmgr.websites3 "You have certificates on file that identify these servers">
<!ENTITY certmgr.cas2 "You have certificates on file that identify these certificate authorities">
<!ENTITY certmgr.orphans2 "You have certificates on file that do not fit in any of the other categories">
<!ENTITY certmgr.detail.general_tab.title "General">
<!ENTITY certmgr.detail.general_tab.accesskey "G">

View File

@ -32,9 +32,6 @@ deleteEmailCertConfirm=Are you sure you want to delete these peoples e-mail c
deleteEmailCertImpactDesc=If you delete a persons e-mail certificate, you will no longer be able to send encrypted e-mail to that person.
deleteEmailCertTitle=Delete E-Mail Certificates
deleteOrphanCertConfirm=Are you sure you want to delete these certificates?
deleteOrphanCertTitle=Delete Certificates
# PKCS#12 file dialogs
chooseP12RestoreFileDialog2=Certificate File to Import
chooseP12BackupFileDialog=File Name to Backup

View File

@ -1,47 +0,0 @@
<?xml version="1.0"?>
<!-- This Source Code Form is subject to the terms of the Mozilla Public
- License, v. 2.0. If a copy of the MPL was not distributed with this
- file, You can obtain one at http://mozilla.org/MPL/2.0/. -->
<?xml-stylesheet href="chrome://global/skin/" type="text/css"?>
<!DOCTYPE overlay SYSTEM "chrome://pippki/locale/certManager.dtd">
<overlay id="OrphanOverlay"
xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#"
xmlns:cert="http://netscape.com/rdf-cert#"
xmlns="http://www.mozilla.org/keymaster/gatekeeper/there.is.only.xul">
<vbox id="OrphanCerts">
<description>&certmgr.orphans2;</description>
<separator class="thin"/>
<tree id="orphan-tree" flex="1" enableColumnDrag="true"
onselect="orphan_enableButtons()">
<treecols>
<treecol id="certcol" label="&certmgr.certname;" primary="true"
persist="hidden width ordinal" flex="1"/>
<splitter class="tree-splitter"/>
<treecol id="tokencol" label="&certmgr.tokenname;"
persist="hidden width ordinal" flex="1"/>
</treecols>
<treechildren ondblclick="viewCerts();"/>
</tree>
<separator class="thin"/>
<hbox>
<button id="orphan_viewButton" class="normal"
label="&certmgr.view2.label;"
accesskey="&certmgr.view2.accesskey;"
disabled="true" oncommand="viewCerts();"/>
<button id="orphan_exportButton" class="normal"
label="&certmgr.export.label;"
accesskey="&certmgr.export.accesskey;"
disabled="true" oncommand="exportCerts();"/>
<button id="orphan_deleteButton" class="normal"
label="&certmgr.delete2.label;"
accesskey="&certmgr.delete2.accesskey;"
disabled="true" oncommand="deleteCerts();"/>
</hbox>
</vbox>
</overlay>

View File

@ -47,11 +47,6 @@ var emailTreeView;
* @type nsICertTree
*/
var userTreeView;
/**
* Cert tree for the "Other" tab.
* @type nsICertTree
*/
var orphanTreeView;
function LoadCerts() {
certdb = Components.classes[nsX509CertDB].getService(nsIX509CertDB);
@ -77,11 +72,6 @@ function LoadCerts() {
userTreeView.loadCertsFromCache(certcache, nsIX509Cert.USER_CERT);
document.getElementById("user-tree").view = userTreeView;
orphanTreeView = Components.classes[nsCertTree]
.createInstance(nsICertTree);
orphanTreeView.loadCertsFromCache(certcache, nsIX509Cert.UNKNOWN_CERT);
document.getElementById("orphan-tree").view = orphanTreeView;
enableBackupAllButton();
}
@ -95,7 +85,6 @@ function getSelectedCerts() {
var mine_tab = document.getElementById("mine_tab");
var others_tab = document.getElementById("others_tab");
var websites_tab = document.getElementById("websites_tab");
var orphan_tab = document.getElementById("orphan_tab");
var items = null;
if (ca_tab.selected) {
items = caTreeView.selection;
@ -105,8 +94,6 @@ function getSelectedCerts() {
items = emailTreeView.selection;
} else if (websites_tab.selected) {
items = serverTreeView.selection;
} else if (orphan_tab.selected) {
items = orphanTreeView.selection;
}
selected_certs = [];
var cert = null;
@ -128,8 +115,6 @@ function getSelectedCerts() {
cert = emailTreeView.getCert(j);
} else if (websites_tab.selected) {
cert = serverTreeView.getCert(j);
} else if (orphan_tab.selected) {
cert = orphanTreeView.getCert(j);
}
if (cert) {
var sc = selected_certs.length;
@ -146,7 +131,6 @@ function getSelectedTreeItems() {
var mine_tab = document.getElementById("mine_tab");
var others_tab = document.getElementById("others_tab");
var websites_tab = document.getElementById("websites_tab");
var orphan_tab = document.getElementById("orphan_tab");
var items = null;
if (ca_tab.selected) {
items = caTreeView.selection;
@ -156,8 +140,6 @@ function getSelectedTreeItems() {
items = emailTreeView.selection;
} else if (websites_tab.selected) {
items = serverTreeView.selection;
} else if (orphan_tab.selected) {
items = orphanTreeView.selection;
}
selected_certs = [];
selected_tree_items = [];
@ -181,8 +163,6 @@ function getSelectedTreeItems() {
tree_item = emailTreeView.getTreeItem(j);
} else if (websites_tab.selected) {
tree_item = serverTreeView.getTreeItem(j);
} else if (orphan_tab.selected) {
tree_item = orphanTreeView.getTreeItem(j);
}
if (tree_item) {
var sc = selected_tree_items.length;
@ -278,15 +258,6 @@ function email_enableButtons() {
enableButtonsForCertTree(emailTreeView, idList);
}
function orphan_enableButtons() {
let idList = [
"orphan_viewButton",
"orphan_exportButton",
"orphan_deleteButton",
];
enableButtonsForCertTree(orphanTreeView, idList);
}
function backupCerts() {
getSelectedCerts();
var numcerts = selected_certs.length;
@ -405,7 +376,6 @@ function deleteCerts() {
"websites_tab": serverTreeView,
"ca_tab": caTreeView,
"others_tab": emailTreeView,
"orphan_tab": orphanTreeView,
};
let selTab = document.getElementById("certMgrTabbox").selectedItem;
let selTabID = selTab.getAttribute("id");
@ -490,6 +460,4 @@ function addException() {
var certcache = certdb.getCerts();
serverTreeView.loadCertsFromCache(certcache, nsIX509Cert.SERVER_CERT);
serverTreeView.selection.clearSelection();
orphanTreeView.loadCertsFromCache(certcache, nsIX509Cert.UNKNOWN_CERT);
orphanTreeView.selection.clearSelection();
}

View File

@ -9,7 +9,6 @@
<?xul-overlay href="chrome://pippki/content/OthersOverlay.xul"?>
<?xul-overlay href="chrome://pippki/content/WebSitesOverlay.xul"?>
<?xul-overlay href="chrome://pippki/content/CAOverlay.xul"?>
<?xul-overlay href="chrome://pippki/content/OrphanOverlay.xul"?>
<!DOCTYPE dialog SYSTEM "chrome://pippki/locale/certManager.dtd">
@ -34,14 +33,12 @@
<tab id="others_tab" label="&certmgr.tab.others2;"/>
<tab id="websites_tab" label="&certmgr.tab.websites3;"/>
<tab id="ca_tab" label="&certmgr.tab.ca;" selected="true"/>
<tab id="orphan_tab" label="&certmgr.tab.orphan2;"/>
</tabs>
<tabpanels flex="1">
<vbox id="myCerts" flex="1"/>
<vbox id="othersCerts" flex="1"/>
<vbox id="webCerts" flex="1"/>
<vbox id="CACerts" flex="1"/>
<vbox id="OrphanCerts" flex="1"/>
</tabpanels>
</tabbox>

View File

@ -83,11 +83,6 @@ function onLoad() {
confirm = bundle.getString("deleteEmailCertConfirm");
impact = bundle.getString("deleteEmailCertImpactDesc");
break;
case "orphan_tab":
title = bundle.getString("deleteOrphanCertTitle");
confirm = bundle.getString("deleteOrphanCertConfirm");
impact = "";
break;
default:
return;
}

View File

@ -6,7 +6,6 @@ pippki.jar:
% content pippki %content/pippki/
content/pippki/CAOverlay.xul (content/CAOverlay.xul)
content/pippki/MineOverlay.xul (content/MineOverlay.xul)
content/pippki/OrphanOverlay.xul (content/OrphanOverlay.xul)
content/pippki/OthersOverlay.xul (content/OthersOverlay.xul)
content/pippki/WebSitesOverlay.xul (content/WebSitesOverlay.xul)
content/pippki/certDump.xul (content/certDump.xul)

View File

@ -10,6 +10,8 @@
#include "mozilla/Base64.h"
#include "mozilla/Casting.h"
#include "mozilla/Services.h"
#include "mozilla/ErrorResult.h"
#include "mozilla/dom/Promise.h"
#include "nsCOMPtr.h"
#include "nsIInterfaceRequestor.h"
#include "nsIInterfaceRequestorUtils.h"
@ -23,10 +25,41 @@
#include "ssl.h" // For SSL_ClearSessionCache
using namespace mozilla;
using dom::Promise;
// NOTE: Should these be the thread-safe versions?
NS_IMPL_ISUPPORTS(SecretDecoderRing, nsISecretDecoderRing)
void BackgroundSdrEncryptStrings(const nsTArray<nsCString>& plaintexts,
RefPtr<Promise>& aPromise) {
nsCOMPtr<nsISecretDecoderRing> sdrService =
do_GetService(NS_SECRETDECODERRING_CONTRACTID);
InfallibleTArray<nsString> cipherTexts(plaintexts.Length());
nsresult rv = NS_ERROR_FAILURE;
for (uint32_t i = 0; i < plaintexts.Length(); ++i) {
const nsCString& plaintext = plaintexts[i];
nsCString cipherText;
rv = sdrService->EncryptString(plaintext, cipherText);
if (NS_WARN_IF(NS_FAILED(rv))) {
break;
}
cipherTexts.AppendElement(NS_ConvertASCIItoUTF16(cipherText));
}
nsCOMPtr<nsIRunnable> runnable(
NS_NewRunnableFunction("BackgroundSdrEncryptStringsResolve",
[rv, aPromise = Move(aPromise), cipherTexts = Move(cipherTexts)]() {
if (NS_FAILED(rv)) {
aPromise->MaybeReject(rv);
} else {
aPromise->MaybeResolve(cipherTexts);
}
}));
NS_DispatchToMainThread(runnable);
}
SecretDecoderRing::SecretDecoderRing()
{
}
@ -132,6 +165,51 @@ SecretDecoderRing::EncryptString(const nsACString& text,
return NS_OK;
}
NS_IMETHODIMP
SecretDecoderRing::AsyncEncryptStrings(uint32_t plaintextsCount,
const char16_t** plaintexts,
JSContext* aCx,
nsISupports** aPromise) {
MOZ_RELEASE_ASSERT(NS_IsMainThread());
NS_ENSURE_ARG(plaintextsCount);
NS_ENSURE_ARG_POINTER(plaintexts);
NS_ENSURE_ARG_POINTER(aCx);
nsIGlobalObject* globalObject =
xpc::NativeGlobal(JS::CurrentGlobalOrNull(aCx));
if (NS_WARN_IF(!globalObject)) {
return NS_ERROR_UNEXPECTED;
}
ErrorResult result;
RefPtr<Promise> promise = Promise::Create(globalObject, result);
if (NS_WARN_IF(result.Failed())) {
return result.StealNSResult();
}
InfallibleTArray<nsCString> plaintextsUtf8(plaintextsCount);
for (uint32_t i = 0; i < plaintextsCount; ++i) {
plaintextsUtf8.AppendElement(NS_ConvertUTF16toUTF8(plaintexts[i]));
}
nsCOMPtr<nsIRunnable> runnable(
NS_NewRunnableFunction("BackgroundSdrEncryptStrings",
[promise, plaintextsUtf8 = Move(plaintextsUtf8)]() mutable {
BackgroundSdrEncryptStrings(plaintextsUtf8, promise);
}));
nsCOMPtr<nsIThread> encryptionThread;
nsresult rv = NS_NewNamedThread("AsyncSDRThread",
getter_AddRefs(encryptionThread),
runnable);
if (NS_WARN_IF(NS_FAILED(rv))) {
return rv;
}
promise.forget(aPromise);
return NS_OK;
}
NS_IMETHODIMP
SecretDecoderRing::DecryptString(const nsACString& encryptedBase64Text,
/*out*/ nsACString& decryptedText)

View File

@ -20,7 +20,7 @@ class SecretDecoderRing : public nsISecretDecoderRing
, public nsNSSShutDownObject
{
public:
NS_DECL_ISUPPORTS
NS_DECL_THREADSAFE_ISUPPORTS
NS_DECL_NSISECRETDECODERRING
SecretDecoderRing();

View File

@ -483,14 +483,6 @@ nsCertTree::GetCertsByTypeFromCertList(CERTCertList *aCertList,
addOverrides = true;
}
else
if (aWantedType == nsIX509Cert::UNKNOWN_CERT
&& thisCertType == nsIX509Cert::UNKNOWN_CERT) {
// This unknown cert was stored without trust.
// If there are associated overrides, do not show as unknown.
// If there are no associated overrides, display as unknown.
wantThisCertIfNoOverrides = true;
}
else
if (aWantedType == nsIX509Cert::SERVER_CERT
&& thisCertType == nsIX509Cert::SERVER_CERT) {
// This server cert is explicitly marked as a web site peer,

View File

@ -22,6 +22,20 @@ interface nsISecretDecoderRing: nsISupports {
[must_use]
ACString encryptString(in ACString text);
/**
* Run encryptString on multiple strings, asynchronously. This avoids
* janking the browser when a large number of strings must be encrypted
* all at once. This method accepts an array of wstrings which it converts
* to UTF-8 internally before encrypting.
*
* @param plaintextsCount the number of strings to encrypt.
* @param plaintexts the strings to encrypt.
* @return A promise for the list of encrypted strings, encoded as Base64.
*/
[implicit_jscontext, must_use]
nsISupports asyncEncryptStrings(in unsigned long plaintextsCount,
[array, size_is(plaintextsCount)] in wstring plaintexts);
/**
* Decrypt Base64 input.
* See the encryptString() documentation - this method has basically the same

View File

@ -181,16 +181,6 @@ add_task(async function testDeleteCACerts() {
expectedImpact);
});
// Test deleting certs from the "Other" tab.
add_task(async function testDeleteOtherCerts() {
const expectedTitle = "Delete Certificates";
const expectedConfirmMsg =
"Are you sure you want to delete these certificates?";
const expectedImpact = "";
await testHelper("orphan_tab", expectedTitle, expectedConfirmMsg,
expectedImpact);
});
// Test that the right values are returned when the dialog is accepted.
add_task(async function testAcceptDialogReturnValues() {
let [win, retVals] = await openDeleteCertConfirmDialog("ca_tab" /* arbitrary */);

View File

@ -21,7 +21,7 @@ const gTokenPasswordDialogs = {
QueryInterface: XPCOMUtils.generateQI([Ci.nsITokenPasswordDialogs])
};
function run_test() {
add_task(function testEncryptString() {
let sdr = Cc["@mozilla.org/security/sdr;1"]
.getService(Ci.nsISecretDecoderRing);
@ -78,4 +78,42 @@ function run_test() {
equal(gSetPasswordShownCount, 1,
"changePassword() dialog should have been shown exactly once");
}
}
});
// Verifies nsISecretDecoderRing.asyncEncryptStrings(): each ciphertext must
// be valid Base64, differ from the input's UTF-8 form, and round-trip back
// through decryptString().
add_task(async function testAsyncEncryptStrings() {
let sdr = Cc["@mozilla.org/security/sdr;1"]
.getService(Ci.nsISecretDecoderRing);
// Test valid inputs for asyncEncryptStrings() and decryptString().
let inputs = [
"",
" ", // First printable latin1 character (code point 32).
"foo",
"1234567890`~!@#$%^&*()-_=+{[}]|\\:;'\",<.>/?",
"¡äöüÿ", // Misc + last printable latin1 character (code point 255).
"aaa 一二三", // Includes Unicode with code points outside [0, 255].
];
let encrypteds = await sdr.asyncEncryptStrings(inputs.length, inputs);
for (let i = 0; i < inputs.length; i++) {
let encrypted = encrypteds[i];
let input = inputs[i];
// asyncEncryptStrings converts inputs to UTF-8 before encrypting, so
// compare against the UTF-8 form of the input, not the raw UTF-16 string.
let converter = Cc["@mozilla.org/intl/scriptableunicodeconverter"]
.createInstance(Ci.nsIScriptableUnicodeConverter);
converter.charset = "UTF-8";
let convertedInput = converter.ConvertFromUnicode(input);
convertedInput += converter.Finish();
notEqual(convertedInput, encrypted,
"Encrypted input should not just be the input itself");
// The ciphertext must be Base64; atob() throws on anything else.
try {
atob(encrypted);
} catch (e) {
ok(false, `encryptString() should have returned Base64: ${e}`);
}
equal(convertedInput, sdr.decryptString(encrypted),
"decryptString(encryptString(input)) should return input");
}
});

View File

@ -100,8 +100,8 @@ RotaryTracker.prototype = {
this.RotaryEngine = function RotaryEngine(service) {
SyncEngine.call(this, "Rotary", service);
// Ensure that the engine starts with a clean slate.
this.toFetch = [];
this.previousFailed = [];
this.toFetch = new SerializableSet();
this.previousFailed = new SerializableSet();
};
RotaryEngine.prototype = {
__proto__: SyncEngine.prototype,

View File

@ -194,9 +194,11 @@ class BookmarkRepairRequestor extends CollectionRepairRequestor {
for (let id of validationInfo.problems.serverMissing) {
engine.addForWeakUpload(id);
}
let toFetch = engine.toFetch.concat(validationInfo.problems.clientMissing,
validationInfo.problems.serverDeleted);
engine.toFetch = Array.from(new Set(toFetch));
engine.toFetch = Utils.setAddAll(
Utils.setAddAll(engine.toFetch,
validationInfo.problems.clientMissing),
validationInfo.problems.serverDeleted
);
return true;
}

View File

@ -842,6 +842,9 @@ SyncEngine.prototype = {
if (!json.ids) {
json.ids = [];
}
// The set serializes the same way as an array, but offers more efficient
// methods of manipulation.
json.ids = new SerializableSet(json.ids);
return json;
},
@ -892,6 +895,12 @@ SyncEngine.prototype = {
/*
* lastSync is a timestamp in server time.
*/
// Async accessor for the last-sync timestamp; wraps the synchronous
// `lastSync` getter below. Presumably kept async so subclasses can back it
// with async storage — TODO confirm against overriding engines.
async getLastSync() {
return this.lastSync;
},
// Async counterpart of the `lastSync` setter.
async setLastSync(lastSync) {
this.lastSync = lastSync;
},
get lastSync() {
return this._lastSync;
},
@ -911,6 +920,9 @@ SyncEngine.prototype = {
},
set toFetch(ids) {
if (ids.constructor.name != "SerializableSet") {
throw new Error("Bug: Attempted to set toFetch to something that isn't a SerializableSet");
}
this._toFetchStorage.data = { ids };
this._toFetchStorage.saveSoon();
},
@ -919,7 +931,12 @@ SyncEngine.prototype = {
this._previousFailedStorage.ensureDataReady();
return this._previousFailedStorage.data.ids;
},
set previousFailed(ids) {
if (ids.constructor.name != "SerializableSet") {
throw new Error(
"Bug: Attempted to set previousFailed to something that isn't a SerializableSet");
}
this._previousFailedStorage.data = { ids };
this._previousFailedStorage.saveSoon();
},
@ -1018,13 +1035,7 @@ SyncEngine.prototype = {
// the end of a sync, or after an error, we add all objects remaining in
// this._modified to the tracker.
this.lastSyncLocal = Date.now();
let initialChanges;
if (this.lastSync) {
initialChanges = await this.pullNewChanges();
} else {
this._log.debug("First sync, uploading all items");
initialChanges = await this.pullAllChanges();
}
let initialChanges = await this.pullChanges();
this._modified.replace(initialChanges);
// Clear the tracker now. If the sync fails we'll add the ones we failed
// to upload back.
@ -1038,6 +1049,15 @@ SyncEngine.prototype = {
this._delete = {};
},
async pullChanges() {
let lastSync = await this.getLastSync();
if (lastSync) {
return this.pullNewChanges();
}
this._log.debug("First sync, uploading all items");
return this.pullAllChanges();
},
/**
* A tiny abstraction to make it easier to test incoming record
* application.
@ -1069,8 +1089,9 @@ SyncEngine.prototype = {
this._log.trace("Downloading & applying server changes");
let newitems = this.itemSource();
let lastSync = await this.getLastSync();
newitems.newer = this.lastSync;
newitems.newer = lastSync;
newitems.full = true;
let downloadLimit = Infinity;
@ -1100,13 +1121,13 @@ SyncEngine.prototype = {
// reconciled => number of items that were reconciled.
let count = {applied: 0, failed: 0, newFailed: 0, reconciled: 0};
let recordsToApply = [];
let failedInCurrentSync = [];
let failedInCurrentSync = new SerializableSet();
let oldestModified = this.lastModified;
let downloadedIDs = new Set();
// Stage 1: Fetch new records from the server, up to the download limit.
if (this.lastModified == null || this.lastModified > this.lastSync) {
if (this.lastModified == null || this.lastModified > lastSync) {
let { response, records } = await newitems.getBatched(this.downloadBatchSize);
if (!response.success) {
response.failureCode = ENGINE_DOWNLOAD_FAIL;
@ -1124,7 +1145,7 @@ SyncEngine.prototype = {
let { shouldApply, error } = await this._maybeReconcile(record);
if (error) {
failedInCurrentSync.push(record.id);
failedInCurrentSync.add(record.id);
count.failed++;
continue;
}
@ -1136,7 +1157,7 @@ SyncEngine.prototype = {
}
let failedToApply = await this._applyRecords(recordsToApply);
failedInCurrentSync.push(...failedToApply);
Utils.setAddAll(failedInCurrentSync, failedToApply);
// `applied` is a bit of a misnomer: it counts records that *should* be
// applied, so it also includes records that we tried to apply and failed.
@ -1153,7 +1174,7 @@ SyncEngine.prototype = {
if (downloadedIDs.size == downloadLimit) {
let guidColl = this.itemSource();
guidColl.newer = this.lastSync;
guidColl.newer = lastSync;
guidColl.older = oldestModified;
guidColl.sort = "oldest";
@ -1165,14 +1186,15 @@ SyncEngine.prototype = {
// that in case the Sync server doesn't support `older` (bug 1316110).
let remainingIDs = guids.obj.filter(id => !downloadedIDs.has(id));
if (remainingIDs.length > 0) {
this.toFetch = Utils.arrayUnion(this.toFetch, remainingIDs);
this.toFetch = Utils.setAddAll(this.toFetch, remainingIDs);
}
}
// Fast-foward the lastSync timestamp since we have backlogged the
// remaining items.
if (this.lastSync < this.lastModified) {
this.lastSync = this.lastModified;
if (lastSync < this.lastModified) {
lastSync = this.lastModified;
await this.setLastSync(lastSync);
}
// Stage 3: Backfill records from the backlog, and those that failed to
@ -1180,8 +1202,9 @@ SyncEngine.prototype = {
// download limit, to prevent a large backlog for one engine from blocking
// the others. We'll keep processing the backlog on subsequent engine syncs.
let failedInPreviousSync = this.previousFailed;
let idsToBackfill = Utils.arrayUnion(this.toFetch.slice(0, downloadLimit),
failedInPreviousSync);
let idsToBackfill = Array.from(
Utils.setAddAll(Utils.subsetOfSize(this.toFetch, downloadLimit),
failedInPreviousSync));
// Note that we intentionally overwrite the previously failed list here.
// Records that fail to decrypt or apply in two consecutive syncs are likely
@ -1230,20 +1253,22 @@ SyncEngine.prototype = {
count.failed += failedToApply.length;
count.applied += backfilledRecordsToApply.length;
this.toFetch = Utils.arraySub(this.toFetch, ids);
this.previousFailed = Utils.arrayUnion(this.previousFailed, failedInBackfill);
this.toFetch = Utils.setDeleteAll(this.toFetch, ids);
this.previousFailed = Utils.setAddAll(this.previousFailed, failedInBackfill);
if (this.lastSync < this.lastModified) {
this.lastSync = this.lastModified;
if (lastSync < this.lastModified) {
lastSync = this.lastModified;
await this.setLastSync(lastSync);
}
}
count.newFailed = this.previousFailed.reduce((count, engine) => {
if (failedInPreviousSync.indexOf(engine) == -1) {
count++;
count.newFailed = 0;
for (let item of this.previousFailed) {
if (!failedInPreviousSync.has(item)) {
++count.newFailed;
}
return count;
}, 0);
}
count.succeeded = Math.max(0, count.applied - count.failed);
this._log.info(["Records:",
count.applied, "applied,",
@ -1611,6 +1636,7 @@ SyncEngine.prototype = {
let failed = [];
let successful = [];
let lastSync = await this.getLastSync();
let handleResponse = async (resp, batchOngoing = false) => {
// Note: We don't want to update this.lastSync, or this._modified until
// the batch is complete, however we want to remember success/failure
@ -1629,11 +1655,8 @@ SyncEngine.prototype = {
// Nothing to do yet
return;
}
// Advance lastSync since we've finished the batch.
let modified = resp.headers["x-weave-timestamp"];
if (modified > this.lastSync) {
this.lastSync = modified;
}
let serverModifiedTime = parseFloat(resp.headers["x-weave-timestamp"]);
if (failed.length && this._log.level <= Log.Level.Debug) {
this._log.debug("Records that will be uploaded again because "
+ "the server couldn't store them: "
@ -1646,14 +1669,20 @@ SyncEngine.prototype = {
this._modified.delete(id);
}
await this._onRecordsWritten(successful, failed);
await this._onRecordsWritten(successful, failed, serverModifiedTime);
// Advance lastSync since we've finished the batch.
if (serverModifiedTime > lastSync) {
lastSync = serverModifiedTime;
await this.setLastSync(lastSync);
}
// clear for next batch
failed.length = 0;
successful.length = 0;
};
let postQueue = up.newPostQueue(this._log, this.lastSync, handleResponse);
let postQueue = up.newPostQueue(this._log, lastSync, handleResponse);
for (let id of modifiedIDs) {
let out;
@ -1704,7 +1733,7 @@ SyncEngine.prototype = {
}
},
async _onRecordsWritten(succeeded, failed) {
async _onRecordsWritten(succeeded, failed, serverModifiedTime) {
// Implement this method to take specific actions against successfully
// uploaded records and failed records.
},
@ -1811,8 +1840,8 @@ SyncEngine.prototype = {
async _resetClient() {
this.resetLastSync();
this.previousFailed = [];
this.toFetch = [];
this.previousFailed = new SerializableSet();
this.toFetch = new SerializableSet();
this._needWeakUpload.clear();
},

View File

@ -4,7 +4,8 @@
this.EXPORTED_SYMBOLS = ["BookmarksEngine", "PlacesItem", "Bookmark",
"BookmarkFolder", "BookmarkQuery",
"Livemark", "BookmarkSeparator"];
"Livemark", "BookmarkSeparator",
"BufferedBookmarksEngine"];
var Cc = Components.classes;
var Ci = Components.interfaces;
@ -18,23 +19,26 @@ Cu.import("resource://services-sync/engines.js");
Cu.import("resource://services-sync/record.js");
Cu.import("resource://services-sync/util.js");
XPCOMUtils.defineLazyModuleGetter(this, "BookmarkValidator",
"resource://services-sync/bookmark_validator.js");
XPCOMUtils.defineLazyModuleGetters(this, {
SyncedBookmarksMirror: "resource://gre/modules/SyncedBookmarksMirror.jsm",
BookmarkValidator: "resource://services-sync/bookmark_validator.js",
OS: "resource://gre/modules/osfile.jsm",
PlacesBackups: "resource://gre/modules/PlacesBackups.jsm",
PlacesSyncUtils: "resource://gre/modules/PlacesSyncUtils.jsm",
PlacesUtils: "resource://gre/modules/PlacesUtils.jsm",
Resource: "resource://services-sync/resource.js",
});
XPCOMUtils.defineLazyGetter(this, "PlacesBundle", () => {
return Services.strings.createBundle("chrome://places/locale/places.properties");
});
XPCOMUtils.defineLazyModuleGetter(this, "PlacesUtils",
"resource://gre/modules/PlacesUtils.jsm");
XPCOMUtils.defineLazyModuleGetter(this, "PlacesSyncUtils",
"resource://gre/modules/PlacesSyncUtils.jsm");
XPCOMUtils.defineLazyModuleGetter(this, "PlacesBackups",
"resource://gre/modules/PlacesBackups.jsm");
const ANNOS_TO_TRACK = [PlacesSyncUtils.bookmarks.DESCRIPTION_ANNO,
PlacesSyncUtils.bookmarks.SIDEBAR_ANNO,
PlacesUtils.LMANNO_FEEDURI, PlacesUtils.LMANNO_SITEURI];
XPCOMUtils.defineLazyGetter(this, "ANNOS_TO_TRACK", () => [
PlacesSyncUtils.bookmarks.DESCRIPTION_ANNO,
PlacesSyncUtils.bookmarks.SIDEBAR_ANNO, PlacesUtils.LMANNO_FEEDURI,
PlacesUtils.LMANNO_SITEURI,
]);
const SERVICE_NOT_SUPPORTED = "Service not supported on this platform";
const FOLDER_SORTINDEX = 1000000;
const {
SOURCE_SYNC,
@ -43,10 +47,6 @@ const {
SOURCE_SYNC_REPARENT_REMOVED_FOLDER_CHILDREN,
} = Ci.nsINavBookmarksService;
const ORGANIZERQUERY_ANNO = "PlacesOrganizer/OrganizerQuery";
const ALLBOOKMARKS_ANNO = "AllBookmarks";
const MOBILE_ANNO = "MobileBookmarks";
// Roots that should be deleted from the server, instead of applied locally.
// This matches `AndroidBrowserBookmarksRepositorySession::forbiddenGUID`,
// but allows tags because we don't want to reparent tag folders or tag items
@ -128,7 +128,7 @@ PlacesItem.prototype = {
};
let dateAdded = PlacesSyncUtils.bookmarks.ratchetTimestampBackwards(
this.dateAdded, +this.modified * 1000);
if (dateAdded !== undefined) {
if (dateAdded > 0) {
result.dateAdded = dateAdded;
}
return result;
@ -273,13 +273,53 @@ BookmarkSeparator.prototype = {
Utils.deferGetSet(BookmarkSeparator, "cleartext", "pos");
this.BookmarksEngine = function BookmarksEngine(service) {
/**
* The rest of this file implements two different bookmarks engines and stores.
* The `services.sync.engine.bookmarks.buffer` pref controls which one we use.
* `BaseBookmarksEngine` and `BaseBookmarksStore` define a handful of methods
* shared between the two implementations.
*
* `BookmarksEngine` and `BookmarksStore` pull locally changed IDs before
* syncing, examine every incoming record, use the default record-level
* reconciliation to resolve merge conflicts, and update records in Places
* using public APIs. This is similar to how the other sync engines work.
*
* Unfortunately, this general approach doesn't serve bookmark sync well.
* Bookmarks form a tree locally, but they're stored as opaque, encrypted, and
* unordered records on the server. The records are interdependent, with a
* set of constraints: each parent must know the IDs and order of its children,
* and a child can't appear in multiple parents.
*
* This has two important implications.
*
* First, some changes require us to upload multiple records. For example,
* moving a bookmark into a different folder uploads the bookmark, old folder,
* and new folder.
*
* Second, conflict resolution, like adding a bookmark to a folder on one
* device, and moving a different bookmark out of the same folder on a different
* device, must account for the tree structure. Otherwise, we risk uploading an
* incomplete tree, and confuse other devices that try to sync.
*
* Historically, the lack of durable change tracking and atomic uploads meant
* that we'd miss these changes entirely, or leave the server in an inconsistent
* state after a partial sync. Another device would then sync, download and
* apply the partial state directly to Places, and upload its changes. This
* could easily result in Sync scrambling bookmarks on both devices, and user
* intervention to manually undo the damage would make things worse.
*
* `BufferedBookmarksEngine` and `BufferedBookmarksStore` mitigate this by
* mirroring incoming bookmarks in a separate database, constructing trees from
* the local and remote bookmarks, and merging the two trees into a single
* consistent tree that accounts for every bookmark. For more information about
* merging, please see the explanation above `SyncedBookmarksMirror`.
*/
function BaseBookmarksEngine(service) {
SyncEngine.call(this, "Bookmarks", service);
};
BookmarksEngine.prototype = {
}
BaseBookmarksEngine.prototype = {
__proto__: SyncEngine.prototype,
_recordObj: PlacesItem,
_storeObj: BookmarksStore,
_trackerObj: BookmarksTracker,
version: 2,
_defaultSort: "index",
@ -287,6 +327,70 @@ BookmarksEngine.prototype = {
syncPriority: 4,
allowSkippedRecord: false,
async _syncFinish() {
await SyncEngine.prototype._syncFinish.call(this);
await PlacesSyncUtils.bookmarks.ensureMobileQuery();
},
async _createRecord(id) {
if (this._modified.isTombstone(id)) {
// If we already know a changed item is a tombstone, just create the
// record without dipping into Places.
return this._createTombstone(id);
}
let record = await SyncEngine.prototype._createRecord.call(this, id);
if (record.deleted) {
// Make sure deleted items are marked as tombstones. We do this here
// in addition to the `isTombstone` call above because it's possible
// a changed bookmark might be deleted during a sync (bug 1313967).
this._modified.setTombstone(record.id);
}
return record;
},
async pullAllChanges() {
return this.pullNewChanges();
},
async trackRemainingChanges() {
let changes = this._modified.changes;
await PlacesSyncUtils.bookmarks.pushChanges(changes);
},
_deleteId(id) {
this._noteDeletedId(id);
},
async _resetClient() {
await super._resetClient();
await PlacesSyncUtils.bookmarks.reset();
},
// Cleans up the Places root, reading list items (ignored in bug 762118,
// removed in bug 1155684), and pinned sites.
_shouldDeleteRemotely(incomingItem) {
return FORBIDDEN_INCOMING_IDS.includes(incomingItem.id) ||
FORBIDDEN_INCOMING_PARENT_IDS.includes(incomingItem.parentid);
},
getValidator() {
return new BookmarkValidator();
}
};
/**
* The original bookmarks engine. Uses an in-memory GUID map for deduping, and
* the default implementation for reconciling changes. Handles child ordering
* and deletions at the end of a sync.
*/
this.BookmarksEngine = function BookmarksEngine(service) {
BaseBookmarksEngine.apply(this, arguments);
};
BookmarksEngine.prototype = {
__proto__: BaseBookmarksEngine.prototype,
_storeObj: BookmarksStore,
emptyChangeset() {
return new BookmarksChangeset();
},
@ -522,34 +626,21 @@ BookmarksEngine.prototype = {
this._store._childrenToOrder = {};
},
async _syncFinish() {
await SyncEngine.prototype._syncFinish.call(this);
await PlacesSyncUtils.bookmarks.ensureMobileQuery();
},
async _syncCleanup() {
await SyncEngine.prototype._syncCleanup.call(this);
delete this._guidMap;
},
async _createRecord(id) {
if (this._modified.isTombstone(id)) {
// If we already know a changed item is a tombstone, just create the
// record without dipping into Places.
return this._createTombstone(id);
let record = await super._createRecord(id);
if (record.deleted) {
return record;
}
// Create the record as usual, but mark it as having dupes if necessary.
let record = await SyncEngine.prototype._createRecord.call(this, id);
// Mark the record as having dupes if necessary.
let entry = await this._mapDupe(record);
if (entry != null && entry.hasDupe) {
record.hasDupe = true;
}
if (record.deleted) {
// Make sure deleted items are marked as tombstones. We do this here
// in addition to the `isTombstone` call above because it's possible
// a changed bookmark might be deleted during a sync (bug 1313967).
this._modified.setTombstone(record.id);
}
return record;
},
@ -569,28 +660,10 @@ BookmarksEngine.prototype = {
return mapped ? mapped.toString() : mapped;
},
async pullAllChanges() {
return this.pullNewChanges();
},
async pullNewChanges() {
return this._tracker.promiseChangedIDs();
},
async trackRemainingChanges() {
let changes = this._modified.changes;
await PlacesSyncUtils.bookmarks.pushChanges(changes);
},
_deleteId(id) {
this._noteDeletedId(id);
},
async _resetClient() {
await SyncEngine.prototype._resetClient.call(this);
await PlacesSyncUtils.bookmarks.reset();
},
// Called when _findDupe returns a dupe item and the engine has decided to
// switch the existing item to the new incoming item.
async _switchItemToDupe(localDupeGUID, incomingItem) {
@ -600,13 +673,6 @@ BookmarksEngine.prototype = {
this._modified.insert(newChanges);
},
// Cleans up the Places root, reading list items (ignored in bug 762118,
// removed in bug 1155684), and pinned sites.
_shouldDeleteRemotely(incomingItem) {
return FORBIDDEN_INCOMING_IDS.includes(incomingItem.id) ||
FORBIDDEN_INCOMING_PARENT_IDS.includes(incomingItem.parentid);
},
beforeRecordDiscard(localRecord, remoteRecord, remoteIsNewer) {
if (localRecord.type != "folder" || remoteRecord.type != "folder") {
return;
@ -629,18 +695,228 @@ BookmarksEngine.prototype = {
this._log.debug("Recording children of " + localRecord.id, order);
this._store._childrenToOrder[localRecord.id] = order;
},
};
getValidator() {
return new BookmarkValidator();
/**
* The buffered bookmarks engine uses a different store that stages downloaded
* bookmarks in a separate database, instead of writing directly to Places. The
* buffer handles reconciliation, so we stub out `_reconcile`, and wait to pull
* changes until we're ready to upload.
*/
this.BufferedBookmarksEngine = function BufferedBookmarksEngine() {
BaseBookmarksEngine.apply(this, arguments);
};
BufferedBookmarksEngine.prototype = {
__proto__: BaseBookmarksEngine.prototype,
_storeObj: BufferedBookmarksStore,
async getLastSync() {
let mirror = await this._store.ensureOpenMirror();
return mirror.getCollectionHighWaterMark();
},
async setLastSync(lastSync) {
let mirror = await this._store.ensureOpenMirror();
await mirror.setCollectionLastModified(lastSync);
// Update the pref so that reverting to the original bookmarks engine
// doesn't download records we've already applied.
super.lastSync = lastSync;
},
get lastSync() {
throw new TypeError("Use getLastSync");
},
set lastSync(value) {
throw new TypeError("Use setLastSync");
},
emptyChangeset() {
return new BufferedBookmarksChangeset();
},
async _processIncoming(newitems) {
try {
await super._processIncoming(newitems);
} finally {
let buf = await this._store.ensureOpenMirror();
let recordsToUpload = await buf.apply({
remoteTimeSeconds: Resource.serverTime,
});
this._modified.replace(recordsToUpload);
}
},
async _reconcile(item) {
return true;
},
async _createRecord(id) {
if (this._needWeakUpload.has(id)) {
return this._store.createRecord(id, this.name);
}
let change = this._modified.changes[id];
if (!change) {
this._log.error("Creating record for item ${id} not in strong " +
"changeset", { id });
throw new TypeError("Can't create record for unchanged item");
}
let record = this._recordFromCleartext(id, change.cleartext);
record.sortindex = await this._store._calculateIndex(record);
return record;
},
_recordFromCleartext(id, cleartext) {
let recordObj = getTypeObject(cleartext.type);
if (!recordObj) {
this._log.warn("Creating record for item ${id} with unknown type ${type}",
{ id, type: cleartext.type });
recordObj = PlacesItem;
}
let record = new recordObj(this.name, id);
record.cleartext = cleartext;
return record;
},
async pullChanges() {
return {};
},
/**
* Writes successfully uploaded records back to the mirror, so that the
* mirror matches the server. We update the mirror before updating Places,
* which has implications for interrupted syncs.
*
* 1. Sync interrupted during upload; server doesn't support atomic uploads.
* We'll download and reapply everything that we uploaded before the
* interruption. All locally changed items retain their change counters.
* 2. Sync interrupted during upload; atomic uploads enabled. The server
* discards the batch. All changed local items retain their change
* counters, so the next sync resumes cleanly.
* 3. Sync interrupted during upload; outgoing records can't fit in a single
* batch. We'll download and reapply all records through the most recent
* committed batch. This is a variation of (1).
* 4. Sync interrupted after we update the mirror, but before cleanup. The
* mirror matches the server, but locally changed items retain their change
* counters. Reuploading them on the next sync should be idempotent, though
* unnecessary. If another client makes a conflicting remote change before
* we sync again, we may incorrectly prefer the local state.
* 5. Sync completes successfully. We'll update the mirror, and reset the
* change counters for all items.
*/
async _onRecordsWritten(succeeded, failed, serverModifiedTime) {
let records = [];
for (let id of succeeded) {
let change = this._modified.changes[id];
if (!change) {
// TODO (Bug 1433178): Write weakly uploaded records back to the mirror.
this._log.info("Uploaded record not in strong changeset", id);
continue;
}
if (!change.synced) {
this._log.info("Record in strong changeset not uploaded", id);
continue;
}
let cleartext = change.cleartext;
if (!cleartext) {
this._log.error("Missing Sync record cleartext for ${id} in ${change}",
{ id, change });
throw new TypeError("Missing cleartext for uploaded Sync record");
}
let record = this._recordFromCleartext(id, cleartext);
record.modified = serverModifiedTime;
records.push(record);
}
let buf = await this._store.ensureOpenMirror();
await buf.store(records, { needsMerge: false });
},
async _resetClient() {
await super._resetClient();
let buf = await this._store.ensureOpenMirror();
await buf.reset();
},
async finalize() {
await super.finalize();
await this._store.finalize();
},
};
/**
* The only code shared between `BookmarksStore` and `BufferedBookmarksStore`
* is for creating Sync records from Places items. Everything else is
* different.
*/
function BaseBookmarksStore(name, engine) {
Store.call(this, name, engine);
}
BaseBookmarksStore.prototype = {
__proto__: Store.prototype,
// Create a record starting from the weave id (places guid)
async createRecord(id, collection) {
let item = await PlacesSyncUtils.bookmarks.fetch(id);
if (!item) { // deleted item
let record = new PlacesItem(collection, id);
record.deleted = true;
return record;
}
let recordObj = getTypeObject(item.kind);
if (!recordObj) {
this._log.warn("Unknown item type, cannot serialize: " + item.kind);
recordObj = PlacesItem;
}
let record = new recordObj(collection, id);
record.fromSyncBookmark(item);
record.sortindex = await this._calculateIndex(record);
return record;
},
async _calculateIndex(record) {
// Ensure folders have a very high sort index so they're not synced last.
if (record.type == "folder")
return FOLDER_SORTINDEX;
// For anything directly under the toolbar, give it a boost of more than an
// unvisited bookmark
let index = 0;
if (record.parentid == "toolbar")
index += 150;
// Add in the bookmark's frecency if we have something.
if (record.bmkUri != null) {
let frecency = await PlacesSyncUtils.history.fetchURLFrecency(record.bmkUri);
if (frecency != -1)
index += frecency;
}
return index;
},
async wipe() {
// Save a backup before clearing out all bookmarks.
await PlacesBackups.create(null, true);
await PlacesSyncUtils.bookmarks.wipe();
}
};
function BookmarksStore(name, engine) {
Store.call(this, name, engine);
/**
* The original store updates Places during the sync, using public methods.
* `BookmarksStore` implements all abstract `Store` methods, and behaves like
* the other stores.
*/
function BookmarksStore() {
BaseBookmarksStore.apply(this, arguments);
this._itemsToDelete = new Set();
}
BookmarksStore.prototype = {
__proto__: Store.prototype,
__proto__: BaseBookmarksStore.prototype,
async itemExists(id) {
return (await this.idForGUID(id)) > 0;
@ -777,29 +1053,6 @@ BookmarksStore.prototype = {
this._itemsToDelete.clear();
},
// Create a record starting from the weave id (places guid)
async createRecord(id, collection) {
let item = await PlacesSyncUtils.bookmarks.fetch(id);
if (!item) { // deleted item
let record = new PlacesItem(collection, id);
record.deleted = true;
return record;
}
let recordObj = getTypeObject(item.kind);
if (!recordObj) {
this._log.warn("Unknown item type, cannot serialize: " + item.kind);
recordObj = PlacesItem;
}
let record = new recordObj(collection, id);
record.fromSyncBookmark(item);
record.sortindex = await this._calculateIndex(record);
return record;
},
async GUIDForId(id) {
let guid = await PlacesUtils.promiseItemGuid(id);
return PlacesSyncUtils.bookmarks.guidToRecordId(guid);
@ -816,35 +1069,74 @@ BookmarksStore.prototype = {
}
},
async _calculateIndex(record) {
// Ensure folders have a very high sort index so they're not synced last.
if (record.type == "folder")
return FOLDER_SORTINDEX;
// For anything directly under the toolbar, give it a boost of more than an
// unvisited bookmark
let index = 0;
if (record.parentid == "toolbar")
index += 150;
// Add in the bookmark's frecency if we have something.
if (record.bmkUri != null) {
let frecency = await PlacesSyncUtils.history.fetchURLFrecency(record.bmkUri);
if (frecency != -1)
index += frecency;
}
return index;
},
async wipe() {
this.clearPendingDeletions();
// Save a backup before clearing out all bookmarks.
await PlacesBackups.create(null, true);
await PlacesSyncUtils.bookmarks.wipe();
await super.wipe();
}
};
/**
* The buffered store delegates to the mirror for staging and applying
* records. Unlike `BookmarksStore`, `BufferedBookmarksStore` only
* implements `applyIncoming`, and `createRecord` via `BaseBookmarksStore`.
* These are the only two methods that `BufferedBookmarksEngine` calls during
* download and upload.
*
* The other `Store` methods intentionally remain abstract, so you can't use
* this store to create or update bookmarks in Places. All changes must go
* through the mirror, which takes care of merging and producing a valid tree.
*/
function BufferedBookmarksStore() {
BaseBookmarksStore.apply(this, arguments);
}
BufferedBookmarksStore.prototype = {
__proto__: BaseBookmarksStore.prototype,
_openMirrorPromise: null,
ensureOpenMirror() {
if (!this._openMirrorPromise) {
this._openMirrorPromise = this._openMirror().catch(err => {
// We may have failed to open the mirror temporarily; for example, if
// the database is locked. Clear the promise so that subsequent
// `ensureOpenMirror` calls can try to open the mirror again.
this._openMirrorPromise = null;
throw err;
});
}
return this._openMirrorPromise;
},
async _openMirror() {
let mirrorPath = OS.Path.join(OS.Constants.Path.profileDir, "weave",
"bookmarks.sqlite");
await OS.File.makeDir(OS.Path.dirname(mirrorPath), {
from: OS.Constants.Path.profileDir,
});
return SyncedBookmarksMirror.open({
path: mirrorPath,
recordTelemetryEvent: (object, method, value, extra) => {
this.engine.service.recordTelemetryEvent(object, method, value,
extra);
},
});
},
async applyIncoming(record) {
let buf = await this.ensureOpenMirror();
await buf.store([record]);
},
async finalize() {
if (!this._openMirrorPromise) {
return;
}
let buf = await this._openMirrorPromise;
await buf.finalize();
},
};
// The bookmarks tracker is a special flower. Instead of listening for changes
// via observer notifications, it queries Places for the set of items that have
// changed since the last sync. Because it's a "pull-based" tracker, it ignores
@ -853,7 +1145,6 @@ BookmarksStore.prototype = {
function BookmarksTracker(name, engine) {
this._batchDepth = 0;
this._batchSawScoreIncrement = false;
this._migratedOldEntries = false;
Tracker.call(this, name, engine);
}
BookmarksTracker.prototype = {
@ -912,60 +1203,11 @@ BookmarksTracker.prototype = {
throw new Error("Don't set initial changed bookmark IDs");
},
// Migrates tracker entries from the old JSON-based tracker to Places. This
// is called the first time we start tracking changes.
async _migrateOldEntries() {
let existingIDs = await Utils.jsonLoad("changes/" + this.file, this);
if (existingIDs === null) {
// If the tracker file doesn't exist, we don't need to migrate, even if
// the engine is enabled. It's possible we're upgrading before the first
// sync. In the worst case, getting this wrong has the same effect as a
// restore: we'll reupload everything to the server.
this._log.debug("migrateOldEntries: Missing bookmarks tracker file; " +
"skipping migration");
return null;
}
if (!this._needsMigration()) {
// We have a tracker file, but bookmark syncing is disabled, or this is
// the first sync. It's likely the tracker file is stale. Remove it and
// skip migration.
this._log.debug("migrateOldEntries: Bookmarks engine disabled or " +
"first sync; skipping migration");
return Utils.jsonRemove("changes/" + this.file, this);
}
// At this point, we know the engine is enabled, we have a tracker file
// (though it may be empty), and we've synced before.
this._log.debug("migrateOldEntries: Migrating old tracker entries");
let entries = [];
for (let id in existingIDs) {
let change = existingIDs[id];
// Allow raw timestamps for backward-compatibility with changed IDs
// persisted before bug 1274496.
let timestamp = typeof change == "number" ? change : change.modified;
entries.push({
recordId: id,
modified: timestamp * 1000,
});
}
await PlacesSyncUtils.bookmarks.migrateOldTrackerEntries(entries);
return Utils.jsonRemove("changes/" + this.file, this);
},
_needsMigration() {
return this.engine && this.engineIsEnabled() && this.engine.lastSync > 0;
},
observe: function observe(subject, topic, data) {
Tracker.prototype.observe.call(this, subject, topic, data);
switch (topic) {
case "weave:engine:start-tracking":
if (!this._migratedOldEntries) {
this._migratedOldEntries = true;
Async.promiseSpinningly(this._migrateOldEntries());
}
break;
case "bookmarks-restore-begin":
this._log.debug("Ignoring changes from importing bookmarks.");
@ -1078,16 +1320,46 @@ BookmarksTracker.prototype = {
onItemVisited() {}
};
class BookmarksChangeset extends Changeset {
getStatus(id) {
let change = this.changes[id];
if (!change) {
return PlacesUtils.bookmarks.SYNC_STATUS.UNKNOWN;
}
return change.status;
/**
* A changeset that stores extra metadata in a change record for each ID. The
* engine updates this metadata when uploading Sync records, and writes it back
* to Places in `BaseBookmarksEngine#trackRemainingChanges`.
*
* The `synced` property on a change record means its corresponding item has
* been uploaded, and we should pretend it doesn't exist in the changeset.
*/
class BufferedBookmarksChangeset extends Changeset {
// Only `_reconcile` calls `getModifiedTimestamp` and `has`, and the buffered
// engine does its own reconciliation.
getModifiedTimestamp(id) {
throw new Error("Don't use timestamps to resolve bookmark conflicts");
}
has(id) {
throw new Error("Don't use the changeset to resolve bookmark conflicts");
}
delete(id) {
let change = this.changes[id];
if (change) {
// Mark the change as synced without removing it from the set. We do this
// so that we can update Places in `trackRemainingChanges`.
change.synced = true;
}
}
ids() {
let results = new Set();
for (let id in this.changes) {
if (!this.changes[id].synced) {
results.add(id);
}
}
return [...results];
}
}
class BookmarksChangeset extends BufferedBookmarksChangeset {
getModifiedTimestamp(id) {
let change = this.changes[id];
if (change) {
@ -1113,25 +1385,6 @@ class BookmarksChangeset extends Changeset {
}
}
delete(id) {
let change = this.changes[id];
if (change) {
// Mark the change as synced without removing it from the set. We do this
// so that we can update Places in `trackRemainingChanges`.
change.synced = true;
}
}
ids() {
let results = new Set();
for (let id in this.changes) {
if (!this.changes[id].synced) {
results.add(id);
}
}
return [...results];
}
isTombstone(id) {
let change = this.changes[id];
if (change) {

View File

@ -39,7 +39,6 @@ Cu.import("resource://services-sync/util.js");
function getEngineModules() {
let result = {
Addons: {module: "addons.js", symbol: "AddonsEngine"},
Bookmarks: {module: "bookmarks.js", symbol: "BookmarksEngine"},
Form: {module: "forms.js", symbol: "FormEngine"},
History: {module: "history.js", symbol: "HistoryEngine"},
Password: {module: "passwords.js", symbol: "PasswordEngine"},
@ -59,6 +58,17 @@ function getEngineModules() {
symbol: "CreditCardsEngine",
};
}
if (Svc.Prefs.get("engine.bookmarks.buffer", false)) {
result.Bookmarks = {
module: "bookmarks.js",
symbol: "BufferedBookmarksEngine",
};
} else {
result.Bookmarks = {
module: "bookmarks.js",
symbol: "BookmarksEngine",
};
}
return result;
}

View File

@ -2,7 +2,7 @@
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
this.EXPORTED_SYMBOLS = ["Utils", "Svc"];
this.EXPORTED_SYMBOLS = ["Utils", "Svc", "SerializableSet"];
var {classes: Cc, interfaces: Ci, results: Cr, utils: Cu} = Components;
@ -524,6 +524,47 @@ this.Utils = {
return foo.concat(Utils.arraySub(bar, foo));
},
/**
* Add all the items in `items` to the provided Set in-place.
*
* @return The provided set.
*/
setAddAll(set, items) {
for (let item of items) {
set.add(item);
}
return set;
},
/**
* Delete every items in `items` to the provided Set in-place.
*
* @return The provided set.
*/
setDeleteAll(set, items) {
for (let item of items) {
set.delete(item);
}
return set;
},
/**
* Take the first `size` items from the Set `items`.
*
* @return A Set of size at most `size`
*/
subsetOfSize(items, size) {
let result = new Set();
let count = 0;
for (let item of items) {
if (count++ == size) {
return result;
}
result.add(item);
}
return result;
},
bind2: function Async_bind2(object, method) {
return function innerBind() { return method.apply(object, arguments); };
},
@ -703,6 +744,15 @@ this.Utils = {
}
};
/**
* A subclass of Set that serializes as an Array when passed to JSON.stringify.
*/
class SerializableSet extends Set {
toJSON() {
return Array.from(this);
}
}
XPCOMUtils.defineLazyGetter(Utils, "_utf8Converter", function() {
let converter = Cc["@mozilla.org/intl/scriptableunicodeconverter"]
.createInstance(Ci.nsIScriptableUnicodeConverter);

View File

@ -25,6 +25,7 @@ pref("services.sync.errorhandler.networkFailureReportTimeout", 1209600); // 2 we
pref("services.sync.engine.addons", true);
pref("services.sync.engine.addresses", false);
pref("services.sync.engine.bookmarks", true);
pref("services.sync.engine.bookmarks.buffer", false);
pref("services.sync.engine.creditcards", false);
pref("services.sync.engine.history", true);
pref("services.sync.engine.passwords", true);

View File

@ -38,11 +38,31 @@ add_task(async function setup() {
await Service.engineManager.unregister("bookmarks");
});
add_task(async function test_delete_invalid_roots_from_server() {
function add_bookmark_test(task) {
add_task(async function() {
_(`Running test ${task.name} with legacy bookmarks engine`);
let legacyEngine = new BookmarksEngine(Service);
await legacyEngine.initialize();
try {
await task(legacyEngine);
} finally {
await legacyEngine.finalize();
}
_(`Running test ${task.name} with buffered bookmarks engine`);
let bufferedEngine = new BufferedBookmarksEngine(Service);
await bufferedEngine.initialize();
try {
await task(bufferedEngine);
} finally {
await bufferedEngine.finalize();
}
});
}
add_bookmark_test(async function test_delete_invalid_roots_from_server(engine) {
_("Ensure that we delete the Places and Reading List roots from the server.");
let engine = new BookmarksEngine(Service);
await engine.initialize();
let store = engine._store;
let server = await serverForFoo(engine);
await SyncTestingInfrastructure(server);
@ -81,9 +101,13 @@ add_task(async function test_delete_invalid_roots_from_server() {
await sync_engine_and_validate_telem(engine, false);
ok(!(await store.itemExists("readinglist")), "Should not apply Reading List root");
ok(!(await store.itemExists(listBmk.id)), "Should not apply items in Reading List");
ok((await store.itemExists(newBmk.id)), "Should apply new bookmark");
await Assert.rejects(PlacesUtils.promiseItemId("readinglist"),
/no item found for the given GUID/, "Should not apply Reading List root");
await Assert.rejects(PlacesUtils.promiseItemId(listBmk.id),
/no item found for the given GUID/,
"Should not apply items in Reading List");
ok((await PlacesUtils.promiseItemId(newBmk.id)) > 0,
"Should apply new bookmark");
deepEqual(collection.keys().sort(), ["menu", "mobile", "toolbar", "unfiled", newBmk.id].sort(),
"Should remove Places root and reading list items from server; upload local roots");
@ -122,11 +146,9 @@ add_task(async function bad_record_allIDs() {
await promiseStopServer(server);
});
add_task(async function test_processIncoming_error_orderChildren() {
add_bookmark_test(async function test_processIncoming_error_orderChildren(engine) {
_("Ensure that _orderChildren() is called even when _processIncoming() throws an error.");
let engine = new BookmarksEngine(Service);
await engine.initialize();
let store = engine._store;
let server = await serverForFoo(engine);
await SyncTestingInfrastructure(server);
@ -152,6 +174,15 @@ add_task(async function test_processIncoming_error_orderChildren() {
title: "Get Thunderbird!",
});
let toolbar_record = await store.createRecord("toolbar");
collection.insert("toolbar", encryptPayload(toolbar_record.cleartext));
let bmk1_record = await store.createRecord(bmk1.guid);
collection.insert(bmk1.guid, encryptPayload(bmk1_record.cleartext));
let bmk2_record = await store.createRecord(bmk2.guid);
collection.insert(bmk2.guid, encryptPayload(bmk2_record.cleartext));
// Create a server record for folder1 where we flip the order of
// the children.
let folder1_record = await store.createRecord(folder1.guid);
@ -169,8 +200,8 @@ add_task(async function test_processIncoming_error_orderChildren() {
// Make the 10 minutes old so it will only be synced in the toFetch phase.
bogus_record.modified = Date.now() / 1000 - 60 * 10;
engine.lastSync = Date.now() / 1000 - 60;
engine.toFetch = [BOGUS_GUID];
await engine.setLastSync(Date.now() / 1000 - 60);
engine.toFetch = new SerializableSet([BOGUS_GUID]);
let error;
try {
@ -200,25 +231,23 @@ add_task(async function test_processIncoming_error_orderChildren() {
}
});
add_task(async function test_restorePromptsReupload() {
await test_restoreOrImport(true);
add_bookmark_test(async function test_restorePromptsReupload(engine) {
await test_restoreOrImport(engine, { replace: true });
});
add_task(async function test_importPromptsReupload() {
await test_restoreOrImport(false);
add_bookmark_test(async function test_importPromptsReupload(engine) {
await test_restoreOrImport(engine, { replace: false });
});
// Test a JSON restore or HTML import. Use JSON if `aReplace` is `true`, or
// Test a JSON restore or HTML import. Use JSON if `replace` is `true`, or
// HTML otherwise.
async function test_restoreOrImport(aReplace) {
let verb = aReplace ? "restore" : "import";
let verbing = aReplace ? "restoring" : "importing";
let bookmarkUtils = aReplace ? BookmarkJSONUtils : BookmarkHTMLUtils;
async function test_restoreOrImport(engine, { replace }) {
let verb = replace ? "restore" : "import";
let verbing = replace ? "restoring" : "importing";
let bookmarkUtils = replace ? BookmarkJSONUtils : BookmarkHTMLUtils;
_(`Ensure that ${verbing} from a backup will reupload all records.`);
let engine = new BookmarksEngine(Service);
await engine.initialize();
let store = engine._store;
let server = await serverForFoo(engine);
await SyncTestingInfrastructure(server);
@ -278,10 +307,10 @@ async function test_restoreOrImport(aReplace) {
Assert.equal(wbos[0], bmk2.guid);
_(`Now ${verb} from a backup.`);
await bookmarkUtils.importFromFile(backupFilePath, aReplace);
await bookmarkUtils.importFromFile(backupFilePath, replace);
let bookmarksCollection = server.user("foo").collection("bookmarks");
if (aReplace) {
if (replace) {
_("Verify that we wiped the server.");
Assert.ok(!bookmarksCollection);
} else {
@ -290,29 +319,30 @@ async function test_restoreOrImport(aReplace) {
}
_("Ensure we have the bookmarks we expect locally.");
let guids = await fetchAllRecordIds();
_("GUIDs: " + JSON.stringify([...guids]));
let bookmarkGuids = new Map();
let recordIds = await fetchAllRecordIds();
_("GUIDs: " + JSON.stringify([...recordIds]));
let bookmarkRecordIds = new Map();
let count = 0;
for (let guid of guids) {
for (let recordId of recordIds) {
count++;
let info = await PlacesUtils.bookmarks.fetch(
PlacesSyncUtils.bookmarks.recordIdToGuid(guid));
PlacesSyncUtils.bookmarks.recordIdToGuid(recordId));
// Only one bookmark, so _all_ should be Firefox!
if (info.type == PlacesUtils.bookmarks.TYPE_BOOKMARK) {
_(`Found URI ${info.url.href} for GUID ${guid}`);
bookmarkGuids.set(info.url.href, guid);
_(`Found URI ${info.url.href} for record ID ${recordId}`);
bookmarkRecordIds.set(info.url.href, recordId);
}
}
Assert.ok(bookmarkGuids.has("http://getfirefox.com/"));
if (!aReplace) {
Assert.ok(bookmarkGuids.has("http://getthunderbird.com/"));
Assert.ok(bookmarkRecordIds.has("http://getfirefox.com/"));
if (!replace) {
Assert.ok(bookmarkRecordIds.has("http://getthunderbird.com/"));
}
_("Have the correct number of IDs locally, too.");
let expectedResults = ["menu", "toolbar", "mobile", "unfiled", folder1.guid,
bmk1.guid];
if (!aReplace) {
if (!replace) {
expectedResults.push("toolbar", folder1.guid, bmk2.guid);
}
Assert.equal(count, expectedResults.length);
@ -343,18 +373,18 @@ async function test_restoreOrImport(aReplace) {
});
let expectedFX = {
id: bookmarkGuids.get("http://getfirefox.com/"),
id: bookmarkRecordIds.get("http://getfirefox.com/"),
bmkUri: "http://getfirefox.com/",
title: "Get Firefox!"
};
let expectedTB = {
id: bookmarkGuids.get("http://getthunderbird.com/"),
id: bookmarkRecordIds.get("http://getthunderbird.com/"),
bmkUri: "http://getthunderbird.com/",
title: "Get Thunderbird!"
};
let expectedBookmarks;
if (aReplace) {
if (replace) {
expectedBookmarks = [expectedFX];
} else {
expectedBookmarks = [expectedTB, expectedFX];
@ -366,7 +396,7 @@ async function test_restoreOrImport(aReplace) {
let expectedFolder1 = { title: "Folder 1" };
let expectedFolders;
if (aReplace) {
if (replace) {
expectedFolders = [expectedFolder1];
} else {
expectedFolders = [expectedFolder1, expectedFolder1];
@ -503,7 +533,7 @@ add_task(async function test_bookmark_guidMap_fail() {
let itemPayload = itemRecord.cleartext;
coll.insert(item.guid, encryptPayload(itemPayload));
engine.lastSync = 1; // So we don't back up.
await engine.setLastSync(1); // So we don't back up.
// Make building the GUID map fail.
@ -597,11 +627,9 @@ add_task(async function test_bookmark_tag_but_no_uri() {
await store.update(record);
});
add_task(async function test_misreconciled_root() {
add_bookmark_test(async function test_misreconciled_root(engine) {
_("Ensure that we don't reconcile an arbitrary record with a root.");
let engine = new BookmarksEngine(Service);
await engine.initialize();
let store = engine._store;
let server = await serverForFoo(engine);
await SyncTestingInfrastructure(server);
@ -618,10 +646,9 @@ add_task(async function test_misreconciled_root() {
PlacesUtils.bookmarks.toolbarGuid);
Assert.notEqual(-1, toolbarIDBefore);
let parentGUIDBefore = toolbarBefore.parentid;
let parentIDBefore = await PlacesUtils.promiseItemId(
PlacesSyncUtils.bookmarks.recordIdToGuid(parentGUIDBefore));
Assert.notEqual(-1, parentIDBefore);
let parentRecordIDBefore = toolbarBefore.parentid;
let parentGUIDBefore = PlacesSyncUtils.bookmarks.recordIdToGuid(parentRecordIDBefore);
let parentIDBefore = await PlacesUtils.promiseItemId(parentGUIDBefore);
Assert.equal("string", typeof(parentGUIDBefore));
_("Current parent: " + parentGUIDBefore + " (" + parentIDBefore + ").");
@ -639,15 +666,16 @@ add_task(async function test_misreconciled_root() {
let rec = new FakeRecord(BookmarkFolder, to_apply);
_("Applying record.");
store.applyIncoming(rec);
store.applyIncomingBatch([rec]);
// Ensure that afterwards, toolbar is still there.
// As of 2012-12-05, this only passes because Places doesn't use "toolbar" as
// the real GUID, instead using a generated one. Sync does the translation.
let toolbarAfter = await store.createRecord("toolbar", "bookmarks");
let parentGUIDAfter = toolbarAfter.parentid;
let parentIDAfter = await PlacesUtils.promiseItemId(
PlacesSyncUtils.bookmarks.recordIdToGuid(parentGUIDAfter));
let parentRecordIDAfter = toolbarAfter.parentid;
let parentGUIDAfter = PlacesSyncUtils.bookmarks.recordIdToGuid(
parentRecordIDAfter);
let parentIDAfter = await PlacesUtils.promiseItemId(parentGUIDAfter);
Assert.equal((await PlacesUtils.promiseItemGuid(toolbarIDBefore)),
PlacesUtils.bookmarks.toolbarGuid);
Assert.equal(parentGUIDBefore, parentGUIDAfter);
@ -659,11 +687,9 @@ add_task(async function test_misreconciled_root() {
await promiseStopServer(server);
});
add_task(async function test_sync_dateAdded() {
add_bookmark_test(async function test_sync_dateAdded(engine) {
await Service.recordManager.clearCache();
await PlacesSyncUtils.bookmarks.reset();
let engine = new BookmarksEngine(Service);
await engine.initialize();
let store = engine._store;
let server = await serverForFoo(engine);
await SyncTestingInfrastructure(server);
@ -672,7 +698,7 @@ add_task(async function test_sync_dateAdded() {
// TODO: Avoid random orange (bug 1374599), this is only necessary
// intermittently - reset the last sync date so that we'll get all bookmarks.
engine.lastSync = 1;
await engine.setLastSync(1);
Svc.Obs.notify("weave:engine:start-tracking"); // We skip usual startup...

View File

@ -5,6 +5,7 @@
// many mocks)
Cu.import("resource://gre/modules/Log.jsm");
Cu.import("resource://gre/modules/osfile.jsm");
Cu.import("resource://gre/modules/PlacesSyncUtils.jsm");
Cu.import("resource://services-sync/bookmark_repair.js");
Cu.import("resource://services-sync/constants.js");
Cu.import("resource://services-sync/doctor.js");
@ -12,15 +13,9 @@ Cu.import("resource://services-sync/service.js");
Cu.import("resource://services-sync/engines/clients.js");
Cu.import("resource://services-sync/engines/bookmarks.js");
const LAST_BOOKMARK_SYNC_PREFS = [
"bookmarks.lastSync",
"bookmarks.lastSyncLocal",
];
const BOOKMARK_REPAIR_STATE_PREFS = [
"client.GUID",
"doctor.lastRepairAdvance",
...LAST_BOOKMARK_SYNC_PREFS,
...Object.values(BookmarkRepairRequestor.PREF).map(name =>
`repairs.bookmarks.${name}`
),
@ -31,6 +26,9 @@ let bookmarksEngine;
var recordedEvents = [];
add_task(async function setup() {
await Service.engineManager.unregister("bookmarks");
await Service.engineManager.register(BufferedBookmarksEngine);
clientsEngine = Service.clientsEngine;
clientsEngine.ignoreLastModifiedOnProcessCommands = true;
bookmarksEngine = Service.engineManager.get("bookmarks");
@ -43,7 +41,9 @@ add_task(async function setup() {
});
function checkRecordedEvents(expected, message) {
deepEqual(recordedEvents, expected, message);
// Ignore event telemetry from the merger.
let repairEvents = recordedEvents.filter(event => event.object != "mirror");
deepEqual(repairEvents, expected, message);
// and clear the list so future checks are easier to write.
recordedEvents = [];
}
@ -130,7 +130,16 @@ add_task(async function test_bookmark_repair_integration() {
checkRecordedEvents([], "Should not start repair after first sync");
_("Back up last sync timestamps for remote client");
let restoreRemoteLastBookmarkSync = backupPrefs(LAST_BOOKMARK_SYNC_PREFS);
let buf = await bookmarksEngine._store.ensureOpenMirror();
let metaRows = await buf.db.execute(`
SELECT key, value FROM meta`);
let metaInfos = [];
for (let row of metaRows) {
metaInfos.push({
key: row.getResultByName("key"),
value: row.getResultByName("value"),
});
}
_(`Delete ${bookmarkInfo.guid} locally and on server`);
// Now we will reach into the server and hard-delete the bookmark
@ -140,9 +149,29 @@ add_task(async function test_bookmark_repair_integration() {
await PlacesUtils.bookmarks.remove(bookmarkInfo.guid, {
source: PlacesUtils.bookmarks.SOURCE_SYNC,
});
deepEqual((await bookmarksEngine.pullNewChanges()), {},
deepEqual((await PlacesSyncUtils.bookmarks.pullChanges()), {},
`Should not upload tombstone for ${bookmarkInfo.guid}`);
// Remove the bookmark from the buffer, too.
let itemRows = await buf.db.execute(`
SELECT guid, kind, title, urlId
FROM items
WHERE guid = :guid`,
{ guid: bookmarkInfo.guid });
equal(itemRows.length, 1, `Bookmark ${
bookmarkInfo.guid} should exist in buffer`);
let bufInfos = [];
for (let row of itemRows) {
bufInfos.push({
guid: row.getResultByName("guid"),
kind: row.getResultByName("kind"),
title: row.getResultByName("title"),
urlId: row.getResultByName("urlId"),
});
}
await buf.db.execute(`DELETE FROM items WHERE guid = :guid`,
{ guid: bookmarkInfo.guid });
// sync again - we should have a few problems...
_("Sync again to trigger repair");
validationPromise = promiseValidationDone([
@ -204,7 +233,14 @@ add_task(async function test_bookmark_repair_integration() {
// repair instead of the sync.
bookmarkInfo.source = PlacesUtils.bookmarks.SOURCE_SYNC;
await PlacesUtils.bookmarks.insert(bookmarkInfo);
restoreRemoteLastBookmarkSync();
await buf.db.execute(`
INSERT INTO items(guid, urlId, kind, title)
VALUES(:guid, :urlId, :kind, :title)`,
bufInfos);
await buf.db.execute(`
REPLACE INTO meta(key, value)
VALUES(:key, :value)`,
metaInfos);
_("Sync as remote client");
await Service.sync();
@ -350,9 +386,14 @@ add_task(async function test_repair_client_missing() {
await PlacesUtils.bookmarks.remove(bookmarkInfo.guid, {
source: PlacesUtils.bookmarks.SOURCE_SYNC,
});
// sanity check we aren't going to sync this removal.
do_check_empty((await bookmarksEngine.pullNewChanges()));
// sanity check that the bookmark is not there anymore
// Delete the bookmark from the buffer, too.
let buf = await bookmarksEngine._store.ensureOpenMirror();
await buf.db.execute(`DELETE FROM items WHERE guid = :guid`,
{ guid: bookmarkInfo.guid });
// Ensure we won't upload a tombstone for the removed bookmark.
Assert.deepEqual((await PlacesSyncUtils.bookmarks.pullChanges()), {});
// Ensure the bookmark no longer exists in Places.
Assert.equal(null, await PlacesUtils.bookmarks.fetch(bookmarkInfo.guid));
// sync again - we should have a few problems...
@ -481,6 +522,7 @@ add_task(async function test_repair_server_deleted() {
// Now we will reach into the server and create a tombstone for that bookmark
// but with a last-modified in the past - this way our sync isn't going to
// pick up the record.
_(`Adding server tombstone for ${bookmarkInfo.guid}`);
server.insertWBO("foo", "bookmarks", new ServerWBO(bookmarkInfo.guid, encryptPayload({
id: bookmarkInfo.guid,
deleted: true,

View File

@ -15,7 +15,9 @@ Svc.Prefs.set("engine.bookmarks.validation.enabled", false);
var recordedEvents = [];
// Asserts that the repair telemetry events recorded since the last check
// match `expected`, then clears the recorded list for the next check.
//
// Event telemetry emitted by the bookmark merger (object == "mirror") is
// filtered out first, so callers only assert on repair events.
//
// NOTE: the previous version also asserted on the raw, unfiltered
// `recordedEvents` list; that stale assertion contradicted the filtering
// below and would fail whenever merger telemetry was present, so it has
// been removed.
function checkRecordedEvents(expected) {
  // Ignore event telemetry from the merger.
  let repairEvents = recordedEvents.filter(event => event.object != "mirror");
  deepEqual(repairEvents, expected);
  // and clear the list so future checks are easier to write.
  recordedEvents = [];
}

View File

@ -444,7 +444,7 @@ add_task(async function test_onItemAdded() {
let syncFolderID = PlacesUtils.bookmarks.createFolder(
PlacesUtils.bookmarks.bookmarksMenuFolder, "Sync Folder",
PlacesUtils.bookmarks.DEFAULT_INDEX);
let syncFolderGUID = await engine._store.GUIDForId(syncFolderID);
let syncFolderGUID = await PlacesUtils.promiseItemGuid(syncFolderID);
await verifyTrackedItems(["menu", syncFolderGUID]);
Assert.equal(tracker.score, SCORE_INCREMENT_XLARGE);
@ -456,7 +456,7 @@ add_task(async function test_onItemAdded() {
CommonUtils.makeURI("https://example.org/sync"),
PlacesUtils.bookmarks.DEFAULT_INDEX,
"Sync Bookmark");
let syncBmkGUID = await engine._store.GUIDForId(syncBmkID);
let syncBmkGUID = await PlacesUtils.promiseItemGuid(syncBmkID);
await verifyTrackedItems([syncFolderGUID, syncBmkGUID]);
Assert.equal(tracker.score, SCORE_INCREMENT_XLARGE);
@ -467,7 +467,7 @@ add_task(async function test_onItemAdded() {
let syncSepID = PlacesUtils.bookmarks.insertSeparator(
PlacesUtils.bookmarks.bookmarksMenuFolder,
PlacesUtils.bookmarks.getItemIndex(syncFolderID));
let syncSepGUID = await engine._store.GUIDForId(syncSepID);
let syncSepGUID = await PlacesUtils.promiseItemGuid(syncSepID);
await verifyTrackedItems(["menu", syncSepGUID]);
Assert.equal(tracker.score, SCORE_INCREMENT_XLARGE);
} finally {
@ -568,7 +568,7 @@ add_task(async function test_onItemChanged_itemDates() {
CommonUtils.makeURI("http://getfirefox.com"),
PlacesUtils.bookmarks.DEFAULT_INDEX,
"Get Firefox!");
let fx_guid = await engine._store.GUIDForId(fx_id);
let fx_guid = await PlacesUtils.promiseItemGuid(fx_id);
_(`Firefox GUID: ${fx_guid}`);
await startTracking();
@ -592,40 +592,6 @@ add_task(async function test_onItemChanged_itemDates() {
}
});
// Verifies that changing a bookmark's URL marks the bookmark as changed for
// Sync (tracked), and that the tracker score is bumped exactly twice
// (annotation set during setup + URI change). Tracking is stopped while the
// fixture is created so only the URI change itself is observed.
add_task(async function test_onItemChanged_changeBookmarkURI() {
_("Changes to bookmark URIs should be tracked");
try {
// Pause tracking so fixture setup doesn't pollute the tracked set.
await stopTracking();
_("Insert a bookmark");
let bm = await PlacesUtils.bookmarks.insert({
parentGuid: PlacesUtils.bookmarks.menuGuid,
url: "http://getfirefox.com",
title: "Get Firefox!"
});
_(`Firefox GUID: ${bm.guid}`);
_("Set a tracked annotation to make sure we only notify once");
// The description anno is itself synced; setting it here ensures the later
// URI change doesn't produce a duplicate change notification.
let id = await PlacesUtils.promiseItemId(bm.guid);
PlacesUtils.annotations.setItemAnnotation(
id, PlacesSyncUtils.bookmarks.DESCRIPTION_ANNO, "A test description", 0,
PlacesUtils.annotations.EXPIRE_NEVER);
await startTracking();
_("Change the bookmark's URI");
bm.url = "https://www.mozilla.org/firefox";
bm = await PlacesUtils.bookmarks.update(bm);
// Only the changed bookmark should be tracked, and the score should reflect
// two XLARGE increments — presumably one per notification; TODO confirm
// against the tracker implementation.
await verifyTrackedItems([bm.guid]);
Assert.equal(tracker.score, SCORE_INCREMENT_XLARGE * 2);
} finally {
_("Clean up.");
await cleanup();
}
});
add_task(async function test_onItemTagged() {
_("Items tagged using the synchronous API should be tracked");
@ -636,7 +602,7 @@ add_task(async function test_onItemTagged() {
let folder = PlacesUtils.bookmarks.createFolder(
PlacesUtils.bookmarks.bookmarksMenuFolder, "Parent",
PlacesUtils.bookmarks.DEFAULT_INDEX);
let folderGUID = await engine._store.GUIDForId(folder);
let folderGUID = await PlacesUtils.promiseItemGuid(folder);
_("Folder ID: " + folder);
_("Folder GUID: " + folderGUID);
@ -645,7 +611,7 @@ add_task(async function test_onItemTagged() {
let b = PlacesUtils.bookmarks.insertBookmark(
folder, uri,
PlacesUtils.bookmarks.DEFAULT_INDEX, "Get Firefox!");
let bGUID = await engine._store.GUIDForId(b);
let bGUID = await PlacesUtils.promiseItemGuid(b);
_("New item is " + b);
_("GUID: " + bGUID);
@ -674,12 +640,12 @@ add_task(async function test_onItemUntagged() {
let fx1ID = PlacesUtils.bookmarks.insertBookmark(
PlacesUtils.bookmarks.bookmarksMenuFolder, uri,
PlacesUtils.bookmarks.DEFAULT_INDEX, "Get Firefox!");
let fx1GUID = await engine._store.GUIDForId(fx1ID);
let fx1GUID = await PlacesUtils.promiseItemGuid(fx1ID);
// Different parent and title; same URL.
let fx2ID = PlacesUtils.bookmarks.insertBookmark(
PlacesUtils.bookmarks.toolbarFolder, uri,
PlacesUtils.bookmarks.DEFAULT_INDEX, "Download Firefox");
let fx2GUID = await engine._store.GUIDForId(fx2ID);
let fx2GUID = await PlacesUtils.promiseItemGuid(fx2ID);
PlacesUtils.tagging.tagURI(uri, ["foo"]);
await startTracking();
@ -809,7 +775,7 @@ add_task(async function test_onItemKeywordChanged() {
let b = PlacesUtils.bookmarks.insertBookmark(
folder, uri,
PlacesUtils.bookmarks.DEFAULT_INDEX, "Get Firefox!");
let bGUID = await engine._store.GUIDForId(b);
let bGUID = await PlacesUtils.promiseItemGuid(b);
_("New item is " + b);
_("GUID: " + bGUID);
@ -914,7 +880,7 @@ add_task(async function test_onItemPostDataChanged() {
CommonUtils.makeURI("http://getfirefox.com"),
PlacesUtils.bookmarks.DEFAULT_INDEX,
"Get Firefox!");
let fx_guid = await engine._store.GUIDForId(fx_id);
let fx_guid = await PlacesUtils.promiseItemGuid(fx_id);
_(`Firefox GUID: ${fx_guid}`);
await startTracking();
@ -943,7 +909,7 @@ add_task(async function test_onItemAnnoChanged() {
let b = PlacesUtils.bookmarks.insertBookmark(
folder, CommonUtils.makeURI("http://getfirefox.com"),
PlacesUtils.bookmarks.DEFAULT_INDEX, "Get Firefox!");
let bGUID = await engine._store.GUIDForId(b);
let bGUID = await PlacesUtils.promiseItemGuid(b);
_("New item is " + b);
_("GUID: " + bGUID);
@ -977,7 +943,7 @@ add_task(async function test_onItemAdded_filtered_root() {
PlacesUtils.bookmarks.placesRoot,
"New root",
PlacesUtils.bookmarks.DEFAULT_INDEX);
let rootGUID = await engine._store.GUIDForId(rootID);
let rootGUID = await PlacesUtils.promiseItemGuid(rootID);
_(`New root GUID: ${rootGUID}`);
_("Insert a bookmark underneath the new root");
@ -986,7 +952,7 @@ add_task(async function test_onItemAdded_filtered_root() {
CommonUtils.makeURI("http://getthunderbird.com"),
PlacesUtils.bookmarks.DEFAULT_INDEX,
"Get Thunderbird!");
let untrackedBmkGUID = await engine._store.GUIDForId(untrackedBmkID);
let untrackedBmkGUID = await PlacesUtils.promiseItemGuid(untrackedBmkID);
_(`New untracked bookmark GUID: ${untrackedBmkGUID}`);
_("Insert a bookmark underneath the Places root");
@ -994,7 +960,7 @@ add_task(async function test_onItemAdded_filtered_root() {
PlacesUtils.bookmarks.placesRoot,
CommonUtils.makeURI("http://getfirefox.com"),
PlacesUtils.bookmarks.DEFAULT_INDEX, "Get Firefox!");
let rootBmkGUID = await engine._store.GUIDForId(rootBmkID);
let rootBmkGUID = await PlacesUtils.promiseItemGuid(rootBmkID);
_(`New Places root bookmark GUID: ${rootBmkGUID}`);
_("New root and bookmark should be ignored");
@ -1017,7 +983,7 @@ add_task(async function test_onItemDeleted_filtered_root() {
PlacesUtils.bookmarks.placesRoot,
CommonUtils.makeURI("http://getfirefox.com"),
PlacesUtils.bookmarks.DEFAULT_INDEX, "Get Firefox!");
let rootBmkGUID = await engine._store.GUIDForId(rootBmkID);
let rootBmkGUID = await PlacesUtils.promiseItemGuid(rootBmkID);
_(`New Places root bookmark GUID: ${rootBmkGUID}`);
await startTracking();
@ -1170,14 +1136,14 @@ add_task(async function test_onItemMoved() {
CommonUtils.makeURI("http://getfirefox.com"),
PlacesUtils.bookmarks.DEFAULT_INDEX,
"Get Firefox!");
let fx_guid = await engine._store.GUIDForId(fx_id);
let fx_guid = await PlacesUtils.promiseItemGuid(fx_id);
_("Firefox GUID: " + fx_guid);
let tb_id = PlacesUtils.bookmarks.insertBookmark(
PlacesUtils.bookmarks.bookmarksMenuFolder,
CommonUtils.makeURI("http://getthunderbird.com"),
PlacesUtils.bookmarks.DEFAULT_INDEX,
"Get Thunderbird!");
let tb_guid = await engine._store.GUIDForId(tb_id);
let tb_guid = await PlacesUtils.promiseItemGuid(tb_id);
_("Thunderbird GUID: " + tb_guid);
await startTracking();
@ -1306,21 +1272,21 @@ add_task(async function test_onItemDeleted_removeFolderTransaction() {
PlacesUtils.bookmarks.bookmarksMenuFolder,
"Test folder",
PlacesUtils.bookmarks.DEFAULT_INDEX);
let folder_guid = await engine._store.GUIDForId(folder_id);
let folder_guid = await PlacesUtils.promiseItemGuid(folder_id);
_(`Folder GUID: ${folder_guid}`);
let fx_id = PlacesUtils.bookmarks.insertBookmark(
folder_id,
CommonUtils.makeURI("http://getfirefox.com"),
PlacesUtils.bookmarks.DEFAULT_INDEX,
"Get Firefox!");
let fx_guid = await engine._store.GUIDForId(fx_id);
let fx_guid = await PlacesUtils.promiseItemGuid(fx_id);
_(`Firefox GUID: ${fx_guid}`);
let tb_id = PlacesUtils.bookmarks.insertBookmark(
folder_id,
CommonUtils.makeURI("http://getthunderbird.com"),
PlacesUtils.bookmarks.DEFAULT_INDEX,
"Get Thunderbird!");
let tb_guid = await engine._store.GUIDForId(tb_id);
let tb_guid = await PlacesUtils.promiseItemGuid(tb_id);
_(`Thunderbird GUID: ${tb_guid}`);
await startTracking();
@ -1364,14 +1330,14 @@ add_task(async function test_treeMoved() {
PlacesUtils.bookmarks.bookmarksMenuFolder,
"First test folder",
PlacesUtils.bookmarks.DEFAULT_INDEX);
let folder1_guid = await engine._store.GUIDForId(folder1_id);
let folder1_guid = await PlacesUtils.promiseItemGuid(folder1_id);
// A second folder in the first.
let folder2_id = PlacesUtils.bookmarks.createFolder(
folder1_id,
"Second test folder",
PlacesUtils.bookmarks.DEFAULT_INDEX);
let folder2_guid = await engine._store.GUIDForId(folder2_id);
let folder2_guid = await PlacesUtils.promiseItemGuid(folder2_id);
// Create a couple of bookmarks in the second folder.
PlacesUtils.bookmarks.insertBookmark(
@ -1413,7 +1379,7 @@ add_task(async function test_onItemDeleted() {
CommonUtils.makeURI("http://getthunderbird.com"),
PlacesUtils.bookmarks.DEFAULT_INDEX,
"Get Thunderbird!");
let tb_guid = await engine._store.GUIDForId(tb_id);
let tb_guid = await PlacesUtils.promiseItemGuid(tb_id);
await startTracking();
@ -1553,7 +1519,7 @@ add_task(async function test_onItemDeleted_removeFolderChildren() {
CommonUtils.makeURI("http://getfirefox.com"),
PlacesUtils.bookmarks.DEFAULT_INDEX,
"Get Firefox!");
let fx_guid = await engine._store.GUIDForId(fx_id);
let fx_guid = await PlacesUtils.promiseItemGuid(fx_id);
_(`Firefox GUID: ${fx_guid}`);
let tb_id = PlacesUtils.bookmarks.insertBookmark(
@ -1561,7 +1527,7 @@ add_task(async function test_onItemDeleted_removeFolderChildren() {
CommonUtils.makeURI("http://getthunderbird.com"),
PlacesUtils.bookmarks.DEFAULT_INDEX,
"Get Thunderbird!");
let tb_guid = await engine._store.GUIDForId(tb_id);
let tb_guid = await PlacesUtils.promiseItemGuid(tb_id);
_(`Thunderbird GUID: ${tb_guid}`);
let moz_id = PlacesUtils.bookmarks.insertBookmark(
@ -1570,7 +1536,7 @@ add_task(async function test_onItemDeleted_removeFolderChildren() {
PlacesUtils.bookmarks.DEFAULT_INDEX,
"Mozilla"
);
let moz_guid = await engine._store.GUIDForId(moz_id);
let moz_guid = await PlacesUtils.promiseItemGuid(moz_id);
_(`Mozilla GUID: ${moz_guid}`);
await startTracking();
@ -1595,14 +1561,14 @@ add_task(async function test_onItemDeleted_tree() {
PlacesUtils.bookmarks.bookmarksMenuFolder,
"First test folder",
PlacesUtils.bookmarks.DEFAULT_INDEX);
let folder1_guid = await engine._store.GUIDForId(folder1_id);
let folder1_guid = await PlacesUtils.promiseItemGuid(folder1_id);
// A second folder in the first.
let folder2_id = PlacesUtils.bookmarks.createFolder(
folder1_id,
"Second test folder",
PlacesUtils.bookmarks.DEFAULT_INDEX);
let folder2_guid = await engine._store.GUIDForId(folder2_id);
let folder2_guid = await PlacesUtils.promiseItemGuid(folder2_id);
// Create a couple of bookmarks in the second folder.
let fx_id = PlacesUtils.bookmarks.insertBookmark(
@ -1610,13 +1576,13 @@ add_task(async function test_onItemDeleted_tree() {
CommonUtils.makeURI("http://getfirefox.com"),
PlacesUtils.bookmarks.DEFAULT_INDEX,
"Get Firefox!");
let fx_guid = await engine._store.GUIDForId(fx_id);
let fx_guid = await PlacesUtils.promiseItemGuid(fx_id);
let tb_id = PlacesUtils.bookmarks.insertBookmark(
folder2_id,
CommonUtils.makeURI("http://getthunderbird.com"),
PlacesUtils.bookmarks.DEFAULT_INDEX,
"Get Thunderbird!");
let tb_guid = await engine._store.GUIDForId(tb_id);
let tb_guid = await PlacesUtils.promiseItemGuid(tb_id);
await startTracking();
@ -1630,170 +1596,3 @@ add_task(async function test_onItemDeleted_tree() {
await cleanup();
}
});
// Verifies that the legacy tracker-file migration (_migrateOldEntries) is a
// no-op in three situations where it must NOT touch Places sync fields or
// tombstones: (1) no tracker file on disk, (2) the engine is disabled, and
// (3) this is a first sync (lastSync == 0). In cases (2) and (3) the stale
// tracker file must still be deleted.
add_task(async function test_skip_migration() {
await insertBookmarksToMigrate();
// Snapshot sync fields/tombstones so each scenario can assert nothing moved.
let originalTombstones = await PlacesTestUtils.fetchSyncTombstones();
let originalFields = await PlacesTestUtils.fetchBookmarkSyncFields(
"0gtWTOgYcoJD", "0dbpnMdxKxfg", "r5ouWdPB3l28", "YK5Bdq5MIqL6");
let filePath = OS.Path.join(OS.Constants.Path.profileDir, "weave", "changes",
"bookmarks.json");
// Scenario 1: no tracker file on disk — migration must change nothing.
_("No tracker file");
{
await Utils.jsonRemove("changes/bookmarks", tracker);
ok(!(await OS.File.exists(filePath)), "Tracker file should not exist");
await tracker._migrateOldEntries();
let fields = await PlacesTestUtils.fetchBookmarkSyncFields(
"0gtWTOgYcoJD", "0dbpnMdxKxfg", "r5ouWdPB3l28", "YK5Bdq5MIqL6");
deepEqual(fields, originalFields,
"Sync fields should not change if tracker file is missing");
let tombstones = await PlacesTestUtils.fetchSyncTombstones();
deepEqual(tombstones, originalTombstones,
"Tombstones should not change if tracker file is missing");
}
// Scenario 2: tracker file exists but engine is disabled — skip migration,
// but clean up the stale file.
_("Existing tracker file; engine disabled");
{
await Utils.jsonSave("changes/bookmarks", tracker, {});
ok(await OS.File.exists(filePath),
"Tracker file should exist before disabled engine migration");
engine.disabled = true;
await tracker._migrateOldEntries();
engine.disabled = false;
let fields = await PlacesTestUtils.fetchBookmarkSyncFields(
"0gtWTOgYcoJD", "0dbpnMdxKxfg", "r5ouWdPB3l28", "YK5Bdq5MIqL6");
deepEqual(fields, originalFields,
"Sync fields should not change on disabled engine migration");
let tombstones = await PlacesTestUtils.fetchSyncTombstones();
deepEqual(tombstones, originalTombstones,
"Tombstones should not change if tracker file is missing");
ok(!(await OS.File.exists(filePath)),
"Tracker file should be deleted after disabled engine migration");
}
// Scenario 3: tracker file exists but this is a first sync — skip migration,
// but clean up the stale file.
_("Existing tracker file; first sync");
{
await Utils.jsonSave("changes/bookmarks", tracker, {});
ok(await OS.File.exists(filePath),
"Tracker file should exist before first sync migration");
engine.lastSync = 0;
await tracker._migrateOldEntries();
let fields = await PlacesTestUtils.fetchBookmarkSyncFields(
"0gtWTOgYcoJD", "0dbpnMdxKxfg", "r5ouWdPB3l28", "YK5Bdq5MIqL6");
deepEqual(fields, originalFields,
"Sync fields should not change on first sync migration");
let tombstones = await PlacesTestUtils.fetchSyncTombstones();
deepEqual(tombstones, originalTombstones,
"Tombstones should not change if tracker file is missing");
ok(!(await OS.File.exists(filePath)),
"Tracker file should be deleted after first sync migration");
}
await cleanup();
});
// Verifies migration of an EMPTY legacy tracker file: all bookmarks end up
// with sync status NORMAL and a zero change counter, old tombstones are
// deleted, and the tracker file itself is removed afterwards.
add_task(async function test_migrate_empty_tracker() {
_("Migration with empty tracker file");
await insertBookmarksToMigrate();
// Write an empty ({}) legacy tracker file and simulate a profile that has
// already synced, so the migration path actually runs.
await Utils.jsonSave("changes/bookmarks", tracker, {});
engine.lastSync = Date.now() / 1000;
await tracker._migrateOldEntries();
// With no tracked entries, every migrated bookmark should be clean:
// NORMAL status and a change counter of exactly 0.
let fields = await PlacesTestUtils.fetchBookmarkSyncFields(
"0gtWTOgYcoJD", "0dbpnMdxKxfg", "r5ouWdPB3l28", "YK5Bdq5MIqL6");
for (let field of fields) {
equal(field.syncStatus, PlacesUtils.bookmarks.SYNC_STATUS.NORMAL,
`Sync status of migrated bookmark ${field.guid} should be NORMAL`);
strictEqual(field.syncChangeCounter, 0,
`Change counter of migrated bookmark ${field.guid} should be 0`);
}
let tombstones = await PlacesTestUtils.fetchSyncTombstones();
deepEqual(tombstones, [], "Migration should delete old tombstones");
// The legacy JSON file must be gone once migration completes.
let filePath = OS.Path.join(OS.Constants.Path.profileDir, "weave", "changes",
"bookmarks.json");
ok(!(await OS.File.exists(filePath)),
"Tracker file should be deleted after empty tracker migration");
await cleanup();
});
// Verifies migration of a POPULATED legacy tracker file. The legacy file
// mixes both entry formats seen in the wild: a bare numeric change time, and
// an object with `modified`/`deleted` fields. Expected outcomes:
//  - tracked live bookmarks get NORMAL status and a change counter > 0;
//  - a newer tracked change time updates the bookmark's lastModified, an
//    older one does not;
//  - tracked deletions become Places sync tombstones;
//  - untracked bookmarks are left clean (counter 0);
//  - the legacy file is deleted afterwards.
add_task(async function test_migrate_existing_tracker() {
_("Migration with existing tracker entries");
await insertBookmarksToMigrate();
let mozBmk = await PlacesUtils.bookmarks.fetch("0gtWTOgYcoJD");
let fxBmk = await PlacesUtils.bookmarks.fetch("0dbpnMdxKxfg");
// One change time 60s OLDER than the bookmark's lastModified, one 60s NEWER,
// to exercise both branches of the lastModified update logic.
let mozChangeTime = Math.floor(mozBmk.lastModified / 1000) - 60;
let fxChangeTime = Math.floor(fxBmk.lastModified / 1000) + 60;
await Utils.jsonSave("changes/bookmarks", tracker, {
"0gtWTOgYcoJD": mozChangeTime,
"0dbpnMdxKxfg": {
modified: fxChangeTime,
deleted: false,
},
"3kdIPWHs9hHC": {
modified: 1479494951,
deleted: true,
},
"l7DlMy2lL1jL": 1479496460,
});
engine.lastSync = Date.now() / 1000;
await tracker._migrateOldEntries();
let changedFields = await PlacesTestUtils.fetchBookmarkSyncFields(
"0gtWTOgYcoJD", "0dbpnMdxKxfg");
for (let field of changedFields) {
if (field.guid == "0gtWTOgYcoJD") {
// NOTE(review): `ok(a, b)` treats its second argument as the assertion
// message, so this only checks truthiness of the first argument — it
// looks like `equal(...)` was intended here; confirm before changing.
ok(field.lastModified.getTime(), mozBmk.lastModified.getTime(),
`Modified time for ${field.guid} should not be reset to older change time`);
} else if (field.guid == "0dbpnMdxKxfg") {
equal(field.lastModified.getTime(), fxChangeTime * 1000,
`Modified time for ${field.guid} should be updated to newer change time`);
}
equal(field.syncStatus, PlacesUtils.bookmarks.SYNC_STATUS.NORMAL,
`Sync status of migrated bookmark ${field.guid} should be NORMAL`);
ok(field.syncChangeCounter > 0,
`Change counter of migrated bookmark ${field.guid} should be > 0`);
}
// Bookmarks absent from the tracker file must come out clean.
let unchangedFields = await PlacesTestUtils.fetchBookmarkSyncFields(
"r5ouWdPB3l28", "YK5Bdq5MIqL6");
for (let field of unchangedFields) {
equal(field.syncStatus, PlacesUtils.bookmarks.SYNC_STATUS.NORMAL,
`Sync status of unchanged bookmark ${field.guid} should be NORMAL`);
strictEqual(field.syncChangeCounter, 0,
`Change counter of unchanged bookmark ${field.guid} should be 0`);
}
// Both tracked deletions (object form and bare-number form) should have
// produced tombstones with the recorded times.
// NOTE(review): `await` on deepEqual is a no-op here (deepEqual is
// synchronous); harmless, but the `await` could be dropped.
let tombstones = await PlacesTestUtils.fetchSyncTombstones();
await deepEqual(tombstones, [{
guid: "3kdIPWHs9hHC",
dateRemoved: new Date(1479494951 * 1000),
}, {
guid: "l7DlMy2lL1jL",
dateRemoved: new Date(1479496460 * 1000),
}], "Should write tombstones for deleted tracked items");
let filePath = OS.Path.join(OS.Constants.Path.profileDir, "weave", "changes",
"bookmarks.json");
ok(!(await OS.File.exists(filePath)),
"Tracker file should be deleted after existing tracker migration");
await cleanup();
});

View File

@ -69,7 +69,7 @@ add_task(async function test_history_download_limit() {
let ping = await sync_engine_and_validate_telem(engine, false);
deepEqual(ping.engines[0].incoming, { applied: 5 });
let backlogAfterFirstSync = engine.toFetch.slice(0);
let backlogAfterFirstSync = Array.from(engine.toFetch).sort();
deepEqual(backlogAfterFirstSync, ["place0000000", "place0000001",
"place0000002", "place0000003", "place0000004", "place0000005",
"place0000006", "place0000007", "place0000008", "place0000009"]);
@ -84,7 +84,7 @@ add_task(async function test_history_download_limit() {
// After the second sync, our backlog still contains the same GUIDs: we
// weren't able to make progress on fetching them, since our
// `guidFetchBatchSize` is 0.
let backlogAfterSecondSync = engine.toFetch.slice(0);
let backlogAfterSecondSync = Array.from(engine.toFetch).sort();
deepEqual(backlogAfterFirstSync, backlogAfterSecondSync);
// Now add a newer record to the server.
@ -105,7 +105,7 @@ add_task(async function test_history_download_limit() {
deepEqual(ping.engines[0].incoming, { applied: 1 });
// Our backlog should remain the same.
let backlogAfterThirdSync = engine.toFetch.slice(0);
let backlogAfterThirdSync = Array.from(engine.toFetch).sort();
deepEqual(backlogAfterSecondSync, backlogAfterThirdSync);
equal(engine.lastSync, lastSync + 20);
@ -118,15 +118,16 @@ add_task(async function test_history_download_limit() {
ping = await sync_engine_and_validate_telem(engine, false);
deepEqual(ping.engines[0].incoming, { applied: 5 });
deepEqual(engine.toFetch, ["place0000005", "place0000006", "place0000007",
"place0000008", "place0000009"]);
deepEqual(
Array.from(engine.toFetch).sort(),
["place0000005", "place0000006", "place0000007", "place0000008", "place0000009"]);
// Sync again to clear out the backlog.
engine.lastModified = collection.modified;
ping = await sync_engine_and_validate_telem(engine, false);
deepEqual(ping.engines[0].incoming, { applied: 5 });
deepEqual(engine.toFetch, []);
deepEqual(Array.from(engine.toFetch), []);
await PlacesTestUtils.clearHistory();
});

View File

@ -12,6 +12,16 @@ async function makeSteamEngine() {
return engine;
}
// Builds a SerializableSet containing `length` freshly generated GUIDs.
function guidSetOfSize(length) {
  let guids = [];
  for (let i = 0; i < length; ++i) {
    guids.push(Utils.makeGUID());
  }
  return new SerializableSet(guids);
}
// Compares two Sets for equal contents. Assert.deepEqual can't compare Sets
// directly, so each side is expanded to a sorted array first.
function assertSetsEqual(a, b) {
  let sortedA = [...a].sort();
  let sortedB = [...b].sort();
  Assert.deepEqual(sortedA, sortedB);
}
async function testSteamEngineStorage(test) {
try {
let setupEngine = await makeSteamEngine();
@ -113,10 +123,10 @@ add_task(async function test_toFetch() {
const filename = "weave/toFetch/steam.json";
await testSteamEngineStorage({
toFetch: [Utils.makeGUID(), Utils.makeGUID(), Utils.makeGUID()],
toFetch: guidSetOfSize(3),
setup(engine) {
// Ensure pristine environment
Assert.equal(engine.toFetch.length, 0);
Assert.equal(engine.toFetch.size, 0);
// Write file to disk
engine.toFetch = this.toFetch;
@ -124,13 +134,13 @@ add_task(async function test_toFetch() {
},
check(engine) {
// toFetch is written asynchronously
Assert.deepEqual(engine.toFetch, this.toFetch);
assertSetsEqual(engine.toFetch, this.toFetch);
},
});
await testSteamEngineStorage({
toFetch: [Utils.makeGUID(), Utils.makeGUID(), Utils.makeGUID(), Utils.makeGUID()],
toFetch2: [Utils.makeGUID(), Utils.makeGUID(), Utils.makeGUID(), Utils.makeGUID(), Utils.makeGUID()],
toFetch: guidSetOfSize(4),
toFetch2: guidSetOfSize(5),
setup(engine) {
// Make sure it work for consecutive writes before the callback is executed.
engine.toFetch = this.toFetch;
@ -140,12 +150,12 @@ add_task(async function test_toFetch() {
Assert.equal(engine.toFetch, this.toFetch2);
},
check(engine) {
Assert.deepEqual(engine.toFetch, this.toFetch2);
assertSetsEqual(engine.toFetch, this.toFetch2);
},
});
await testSteamEngineStorage({
toFetch: [Utils.makeGUID(), Utils.makeGUID()],
toFetch: guidSetOfSize(2),
async beforeCheck() {
let toFetchPath = OS.Path.join(OS.Constants.Path.profileDir, filename);
let bytes = new TextEncoder().encode(JSON.stringify(this.toFetch));
@ -154,7 +164,7 @@ add_task(async function test_toFetch() {
},
check(engine) {
// Read file from disk
Assert.deepEqual(engine.toFetch, this.toFetch);
assertSetsEqual(engine.toFetch, this.toFetch);
},
});
});
@ -165,10 +175,10 @@ add_task(async function test_previousFailed() {
const filename = "weave/failed/steam.json";
await testSteamEngineStorage({
previousFailed: [Utils.makeGUID(), Utils.makeGUID(), Utils.makeGUID()],
previousFailed: guidSetOfSize(3),
setup(engine) {
// Ensure pristine environment
Assert.equal(engine.previousFailed.length, 0);
Assert.equal(engine.previousFailed.size, 0);
// Write file to disk
engine.previousFailed = this.previousFailed;
@ -176,13 +186,13 @@ add_task(async function test_previousFailed() {
},
check(engine) {
// previousFailed is written asynchronously
Assert.deepEqual(engine.previousFailed, this.previousFailed);
assertSetsEqual(engine.previousFailed, this.previousFailed);
},
});
await testSteamEngineStorage({
previousFailed: [Utils.makeGUID(), Utils.makeGUID(), Utils.makeGUID(), Utils.makeGUID()],
previousFailed2: [Utils.makeGUID(), Utils.makeGUID(), Utils.makeGUID(), Utils.makeGUID(), Utils.makeGUID()],
previousFailed: guidSetOfSize(4),
previousFailed2: guidSetOfSize(5),
setup(engine) {
// Make sure it work for consecutive writes before the callback is executed.
engine.previousFailed = this.previousFailed;
@ -192,12 +202,12 @@ add_task(async function test_previousFailed() {
Assert.equal(engine.previousFailed, this.previousFailed2);
},
check(engine) {
Assert.deepEqual(engine.previousFailed, this.previousFailed2);
assertSetsEqual(engine.previousFailed, this.previousFailed2);
},
});
await testSteamEngineStorage({
previousFailed: [Utils.makeGUID(), Utils.makeGUID()],
previousFailed: guidSetOfSize(2),
async beforeCheck() {
let previousFailedPath = OS.Path.join(OS.Constants.Path.profileDir,
filename);
@ -207,7 +217,7 @@ add_task(async function test_previousFailed() {
},
check(engine) {
// Read file from disk
Assert.deepEqual(engine.previousFailed, this.previousFailed);
assertSetsEqual(engine.previousFailed, this.previousFailed);
},
});
});
@ -220,18 +230,18 @@ add_task(async function test_resetClient() {
// Ensure pristine environment
Assert.equal(Svc.Prefs.get("steam.lastSync"), undefined);
Assert.equal(Svc.Prefs.get("steam.lastSyncLocal"), undefined);
Assert.equal(engine.toFetch.length, 0);
Assert.equal(engine.toFetch.size, 0);
engine.lastSync = 123.45;
engine.lastSyncLocal = 67890;
engine.toFetch = [Utils.makeGUID(), Utils.makeGUID(), Utils.makeGUID()];
engine.previousFailed = [Utils.makeGUID(), Utils.makeGUID(), Utils.makeGUID()];
engine.toFetch = guidSetOfSize(4);
engine.previousFailed = guidSetOfSize(3);
await engine.resetClient();
Assert.equal(engine.lastSync, 0);
Assert.equal(engine.lastSyncLocal, 0);
Assert.equal(engine.toFetch.length, 0);
Assert.equal(engine.previousFailed.length, 0);
Assert.equal(engine.toFetch.size, 0);
Assert.equal(engine.previousFailed.size, 0);
} finally {
Svc.Prefs.resetBranch("");
}
@ -252,13 +262,13 @@ add_task(async function test_wipeServer() {
try {
// Some data to reset.
engine.lastSync = 123.45;
engine.toFetch = [Utils.makeGUID(), Utils.makeGUID(), Utils.makeGUID()];
engine.toFetch = guidSetOfSize(3),
_("Wipe server data and reset client.");
await engine.wipeServer();
Assert.equal(steamCollection.payload, undefined);
Assert.equal(engine.lastSync, 0);
Assert.equal(engine.toFetch.length, 0);
Assert.equal(engine.toFetch.size, 0);
} finally {
steamServer.stop(do_test_finished);

View File

@ -627,8 +627,8 @@ add_task(async function test_processIncoming_resume_toFetch() {
// Time travel 10 seconds into the future but still download the above WBOs.
let engine = makeRotaryEngine();
engine.lastSync = LASTSYNC;
engine.toFetch = ["flying", "scotsman"];
engine.previousFailed = ["failed0", "failed1", "failed2"];
engine.toFetch = new SerializableSet(["flying", "scotsman"]);
engine.previousFailed = new SerializableSet(["failed0", "failed1", "failed2"]);
let server = sync_httpd_setup({
"/1.1/foo/storage/rotary": collection.handler()
@ -657,7 +657,7 @@ add_task(async function test_processIncoming_resume_toFetch() {
Assert.equal(engine._store.items.failed0, "Record No. 0");
Assert.equal(engine._store.items.failed1, "Record No. 1");
Assert.equal(engine._store.items.failed2, "Record No. 2");
Assert.equal(engine.previousFailed.length, 0);
Assert.equal(engine.previousFailed.size, 0);
} finally {
await cleanAndGo(engine, server);
}
@ -703,8 +703,8 @@ add_task(async function test_processIncoming_notify_count() {
try {
// Confirm initial environment.
Assert.equal(engine.lastSync, 0);
Assert.equal(engine.toFetch.length, 0);
Assert.equal(engine.previousFailed.length, 0);
Assert.equal(engine.toFetch.size, 0);
Assert.equal(engine.previousFailed.size, 0);
do_check_empty(engine._store.items);
let called = 0;
@ -722,8 +722,8 @@ add_task(async function test_processIncoming_notify_count() {
// Confirm failures.
do_check_attribute_count(engine._store.items, 12);
Assert.deepEqual(engine.previousFailed, ["record-no-00", "record-no-05",
"record-no-10"]);
Assert.deepEqual(Array.from(engine.previousFailed).sort(),
["record-no-00", "record-no-05", "record-no-10"].sort());
// There are newly failed records and they are reported.
Assert.equal(called, 1);
@ -737,7 +737,7 @@ add_task(async function test_processIncoming_notify_count() {
// Confirming removed failures.
do_check_attribute_count(engine._store.items, 14);
Assert.deepEqual(engine.previousFailed, ["record-no-00"]);
Assert.deepEqual(Array.from(engine.previousFailed), ["record-no-00"]);
Assert.equal(called, 2);
Assert.equal(counts.failed, 1);
@ -792,12 +792,12 @@ add_task(async function test_processIncoming_previousFailed() {
try {
// Confirm initial environment.
Assert.equal(engine.lastSync, 0);
Assert.equal(engine.toFetch.length, 0);
Assert.equal(engine.previousFailed.length, 0);
Assert.equal(engine.toFetch.size, 0);
Assert.equal(engine.previousFailed.size, 0);
do_check_empty(engine._store.items);
// Initial failed items in previousFailed to be reset.
let previousFailed = [Utils.makeGUID(), Utils.makeGUID(), Utils.makeGUID()];
let previousFailed = new SerializableSet([Utils.makeGUID(), Utils.makeGUID(), Utils.makeGUID()]);
engine.previousFailed = previousFailed;
Assert.equal(engine.previousFailed, previousFailed);
@ -807,9 +807,10 @@ add_task(async function test_processIncoming_previousFailed() {
// Expected result: 4 sync batches with 2 failures each => 8 failures
do_check_attribute_count(engine._store.items, 6);
Assert.deepEqual(engine.previousFailed, ["record-no-00", "record-no-01",
"record-no-04", "record-no-05", "record-no-08", "record-no-09",
"record-no-12", "record-no-13"]);
Assert.deepEqual(
Array.from(engine.previousFailed).sort(),
["record-no-00", "record-no-01", "record-no-04", "record-no-05",
"record-no-08", "record-no-09", "record-no-12", "record-no-13"].sort());
// Sync again with the same failed items (records 0, 1, 8, 9).
await engine._processIncoming();
@ -817,8 +818,9 @@ add_task(async function test_processIncoming_previousFailed() {
// A second sync with the same failed items should not add the same items again.
// Items that did not fail a second time should no longer be in previousFailed.
do_check_attribute_count(engine._store.items, 10);
Assert.deepEqual(engine.previousFailed, ["record-no-00", "record-no-01",
"record-no-08", "record-no-09"]);
Assert.deepEqual(
Array.from(engine.previousFailed).sort(),
["record-no-00", "record-no-01", "record-no-08", "record-no-09"].sort());
// Refetched items that didn't fail the second time are in engine._store.items.
Assert.equal(engine._store.items["record-no-04"], "Record No. 4");
@ -900,8 +902,8 @@ add_task(async function test_processIncoming_failed_records() {
// Confirm initial environment
Assert.equal(engine.lastSync, 0);
Assert.equal(engine.toFetch.length, 0);
Assert.equal(engine.previousFailed.length, 0);
Assert.equal(engine.toFetch.size, 0);
Assert.equal(engine.previousFailed.size, 0);
do_check_empty(engine._store.items);
let observerSubject;
@ -920,8 +922,8 @@ add_task(async function test_processIncoming_failed_records() {
NUMBER_OF_RECORDS - BOGUS_RECORDS.length);
// Ensure that the bogus records will be fetched again on the next sync.
Assert.equal(engine.previousFailed.length, BOGUS_RECORDS.length);
Assert.deepEqual(engine.previousFailed.sort(), BOGUS_RECORDS.sort());
Assert.equal(engine.previousFailed.size, BOGUS_RECORDS.length);
Assert.deepEqual(Array.from(engine.previousFailed).sort(), BOGUS_RECORDS.sort());
// Ensure the observer was notified
Assert.equal(observerData, engine.name);
@ -999,8 +1001,8 @@ add_task(async function test_processIncoming_decrypt_failed() {
try {
// Confirm initial state
Assert.equal(engine.toFetch.length, 0);
Assert.equal(engine.previousFailed.length, 0);
Assert.equal(engine.toFetch.size, 0);
Assert.equal(engine.previousFailed.size, 0);
let observerSubject;
let observerData;
@ -1016,11 +1018,11 @@ add_task(async function test_processIncoming_decrypt_failed() {
Assert.equal(ping.engines[0].incoming.failed, 4);
Assert.equal(ping.engines[0].incoming.newFailed, 4);
Assert.equal(engine.previousFailed.length, 4);
Assert.equal(engine.previousFailed[0], "nojson");
Assert.equal(engine.previousFailed[1], "nojson2");
Assert.equal(engine.previousFailed[2], "nodecrypt");
Assert.equal(engine.previousFailed[3], "nodecrypt2");
Assert.equal(engine.previousFailed.size, 4);
Assert.ok(engine.previousFailed.has("nojson"));
Assert.ok(engine.previousFailed.has("nojson2"));
Assert.ok(engine.previousFailed.has("nodecrypt"));
Assert.ok(engine.previousFailed.has("nodecrypt2"));
// Ensure the observer was notified
Assert.equal(observerData, engine.name);

View File

@ -122,7 +122,7 @@ add_task(async function test_processIncoming_error() {
// Make the 10 minutes old so it will only be synced in the toFetch phase.
bogus_record.modified = Date.now() / 1000 - 60 * 10;
engine.lastSync = Date.now() / 1000 - 60;
engine.toFetch = [BOGUS_GUID];
engine.toFetch = new SerializableSet([BOGUS_GUID]);
let error, pingPayload, fullPing;
try {
@ -182,6 +182,7 @@ add_task(async function test_uploading() {
title: "New Title",
});
await store.wipe();
await engine.resetClient();
ping = await sync_engine_and_validate_telem(engine, false);

View File

@ -1194,8 +1194,6 @@ impl LayoutThread {
debug!("Doc sheets changed, flushing author sheets too");
self.stylist.force_stylesheet_origins_dirty(Origin::Author.into());
}
self.stylist.flush(&guards, Some(element));
}
if viewport_size_changed {
@ -1246,6 +1244,8 @@ impl LayoutThread {
debug!("Noting restyle for {:?}: {:?}", el, style_data);
}
self.stylist.flush(&guards, Some(element), Some(&map));
// Create a layout context for use throughout the following passes.
let mut layout_context =
self.build_layout_context(guards.clone(), true, &map);

View File

@ -8,6 +8,7 @@ use dom::attr::Attr;
use dom::bindings::cell::DomRefCell;
use dom::bindings::codegen::Bindings::EventBinding::EventMethods;
use dom::bindings::codegen::Bindings::FileListBinding::FileListMethods;
use dom::bindings::codegen::Bindings::HTMLFormElementBinding::SelectionMode;
use dom::bindings::codegen::Bindings::HTMLInputElementBinding;
use dom::bindings::codegen::Bindings::HTMLInputElementBinding::HTMLInputElementMethods;
use dom::bindings::codegen::Bindings::KeyboardEventBinding::KeyboardEventMethods;
@ -52,7 +53,7 @@ use std::ops::Range;
use style::attr::AttrValue;
use style::element_state::ElementState;
use style::str::split_commas;
use textinput::{Direction, Selection, SelectionDirection, TextInput};
use textinput::{Direction, SelectionDirection, TextInput};
use textinput::KeyReaction::{DispatchInput, Nothing, RedrawSelection, TriggerDefaultAction};
use textinput::Lines::Single;
@ -188,7 +189,6 @@ pub struct HTMLInputElement {
input_type: Cell<InputType>,
checked_changed: Cell<bool>,
placeholder: DomRefCell<DOMString>,
value_changed: Cell<bool>,
size: Cell<u32>,
maxlength: Cell<i32>,
minlength: Cell<i32>,
@ -244,7 +244,6 @@ impl HTMLInputElement {
input_type: Cell::new(Default::default()),
placeholder: DomRefCell::new(DOMString::new()),
checked_changed: Cell::new(false),
value_changed: Cell::new(false),
maxlength: Cell::new(DEFAULT_MAX_LENGTH),
minlength: Cell::new(DEFAULT_MIN_LENGTH),
size: Cell::new(DEFAULT_INPUT_SIZE),
@ -374,7 +373,7 @@ impl LayoutHTMLInputElementHelpers for LayoutDom<HTMLInputElement> {
match (*self.unsafe_get()).input_type() {
InputType::Password => {
let text = get_raw_textinput_value(self);
let sel = textinput.get_absolute_selection_range();
let sel = textinput.sorted_selection_offsets_range();
// Translate indices from the raw value to indices in the replacement value.
let char_start = text[.. sel.start].chars().count();
@ -383,7 +382,7 @@ impl LayoutHTMLInputElementHelpers for LayoutDom<HTMLInputElement> {
let bytes_per_char = PASSWORD_REPLACEMENT_CHAR.len_utf8();
Some(char_start * bytes_per_char .. char_end * bytes_per_char)
}
input_type if input_type.is_textual() => Some(textinput.get_absolute_selection_range()),
input_type if input_type.is_textual() => Some(textinput.sorted_selection_offsets_range()),
_ => None
}
}
@ -417,6 +416,35 @@ impl TextControl for HTMLInputElement {
_ => false
}
}
// https://html.spec.whatwg.org/multipage/#concept-input-apply
//
// Defines input types to which the select() IDL method applies. These are a superset of the
// types for which selection_api_applies() returns true.
//
// Types omitted which could theoretically be included if they were
// rendered as a text control: file
fn has_selectable_text(&self) -> bool {
match self.input_type() {
InputType::Text | InputType::Search | InputType::Url
| InputType::Tel | InputType::Password | InputType::Email
| InputType::Date | InputType::Month | InputType::Week
| InputType::Time | InputType::DatetimeLocal | InputType::Number
| InputType::Color => {
true
}
InputType::Button | InputType::Checkbox | InputType::File
| InputType::Hidden | InputType::Image | InputType::Radio
| InputType::Range | InputType::Reset | InputType::Submit => {
false
}
}
}
fn set_dirty_value_flag(&self, value: bool) {
self.value_dirty.set(value)
}
}
impl HTMLInputElementMethods for HTMLInputElement {
@ -538,8 +566,7 @@ impl HTMLInputElementMethods for HTMLInputElement {
self.sanitize_value();
// Step 5.
if *self.textinput.borrow().single_line_content() != old_value {
self.textinput.borrow_mut()
.adjust_horizontal_to_limit(Direction::Forward, Selection::NotSelected);
self.textinput.borrow_mut().clear_selection_to_limit(Direction::Forward);
}
}
ValueMode::Default |
@ -557,7 +584,6 @@ impl HTMLInputElementMethods for HTMLInputElement {
}
}
self.value_changed.set(true);
self.upcast::<Node>().dirty(NodeDamage::OtherNodeDamage);
Ok(())
}
@ -687,6 +713,11 @@ impl HTMLInputElementMethods for HTMLInputElement {
}
}
// https://html.spec.whatwg.org/multipage/#dom-textarea/input-select
fn Select(&self) {
self.dom_select(); // defined in TextControl trait
}
// https://html.spec.whatwg.org/multipage/#dom-textarea/input-selectionstart
fn GetSelectionStart(&self) -> Option<u32> {
self.get_dom_selection_start()
@ -722,6 +753,19 @@ impl HTMLInputElementMethods for HTMLInputElement {
self.set_dom_selection_range(start, end, direction)
}
// https://html.spec.whatwg.org/multipage/#dom-textarea/input-setrangetext
fn SetRangeText(&self, replacement: DOMString) -> ErrorResult {
// defined in TextControl trait
self.set_dom_range_text(replacement, None, None, Default::default())
}
// https://html.spec.whatwg.org/multipage/#dom-textarea/input-setrangetext
fn SetRangeText_(&self, replacement: DOMString, start: u32, end: u32,
selection_mode: SelectionMode) -> ErrorResult {
// defined in TextControl trait
self.set_dom_range_text(replacement, Some(start), Some(end), selection_mode)
}
// Select the files based on filepaths passed in,
// enabled by dom.htmlinputelement.select_files.enabled,
// used for test purpose.
@ -902,7 +946,6 @@ impl HTMLInputElement {
self.SetValue(self.DefaultValue())
.expect("Failed to reset input value to default.");
self.value_dirty.set(false);
self.value_changed.set(false);
self.upcast::<Node>().dirty(NodeDamage::OtherNodeDamage);
}
@ -1116,6 +1159,8 @@ impl VirtualMethods for HTMLInputElement {
// https://html.spec.whatwg.org/multipage/#input-type-change
let (old_value_mode, old_idl_value) = (self.value_mode(), self.Value());
let previously_selectable = self.selection_api_applies();
self.input_type.set(new_type);
if new_type.is_textual() {
@ -1167,6 +1212,11 @@ impl VirtualMethods for HTMLInputElement {
// Step 6
self.sanitize_value();
// Steps 7-9
if !previously_selectable && self.selection_api_applies() {
self.textinput.borrow_mut().clear_selection_to_limit(Direction::Backward);
}
},
AttributeMutation::Removed => {
if self.input_type() == InputType::Radio {
@ -1184,7 +1234,7 @@ impl VirtualMethods for HTMLInputElement {
self.update_placeholder_shown_state();
},
&local_name!("value") if !self.value_changed.get() => {
&local_name!("value") if !self.value_dirty.get() => {
let value = mutation.new_value(attr).map(|value| (**value).to_owned());
self.textinput.borrow_mut().set_content(
value.map_or(DOMString::new(), DOMString::from));
@ -1327,7 +1377,7 @@ impl VirtualMethods for HTMLInputElement {
keyevent.MetaKey());
},
DispatchInput => {
self.value_changed.set(true);
self.value_dirty.set(true);
self.update_placeholder_shown_state();
self.upcast::<Node>().dirty(NodeDamage::OtherNodeDamage);
event.mark_as_handled();

View File

@ -5,6 +5,7 @@
use dom::attr::Attr;
use dom::bindings::cell::DomRefCell;
use dom::bindings::codegen::Bindings::EventBinding::EventMethods;
use dom::bindings::codegen::Bindings::HTMLFormElementBinding::SelectionMode;
use dom::bindings::codegen::Bindings::HTMLTextAreaElementBinding;
use dom::bindings::codegen::Bindings::HTMLTextAreaElementBinding::HTMLTextAreaElementMethods;
use dom::bindings::codegen::Bindings::NodeBinding::NodeMethods;
@ -35,7 +36,7 @@ use std::default::Default;
use std::ops::Range;
use style::attr::AttrValue;
use style::element_state::ElementState;
use textinput::{Direction, KeyReaction, Lines, Selection, SelectionDirection, TextInput};
use textinput::{Direction, KeyReaction, Lines, SelectionDirection, TextInput};
#[dom_struct]
pub struct HTMLTextAreaElement {
@ -44,7 +45,7 @@ pub struct HTMLTextAreaElement {
textinput: DomRefCell<TextInput<ScriptToConstellationChan>>,
placeholder: DomRefCell<DOMString>,
// https://html.spec.whatwg.org/multipage/#concept-textarea-dirty
value_changed: Cell<bool>,
value_dirty: Cell<bool>,
form_owner: MutNullableDom<HTMLFormElement>,
}
@ -81,7 +82,7 @@ impl LayoutHTMLTextAreaElementHelpers for LayoutDom<HTMLTextAreaElement> {
return None;
}
let textinput = (*self.unsafe_get()).textinput.borrow_for_layout();
Some(textinput.get_absolute_selection_range())
Some(textinput.sorted_selection_offsets_range())
}
#[allow(unsafe_code)]
@ -122,7 +123,7 @@ impl HTMLTextAreaElement {
placeholder: DomRefCell::new(DOMString::new()),
textinput: DomRefCell::new(TextInput::new(
Lines::Multiple, DOMString::new(), chan, None, None, SelectionDirection::None)),
value_changed: Cell::new(false),
value_dirty: Cell::new(false),
form_owner: Default::default(),
}
}
@ -152,6 +153,14 @@ impl TextControl for HTMLTextAreaElement {
fn selection_api_applies(&self) -> bool {
true
}
fn has_selectable_text(&self) -> bool {
true
}
fn set_dirty_value_flag(&self, value: bool) {
self.value_dirty.set(value)
}
}
impl HTMLTextAreaElementMethods for HTMLTextAreaElement {
@ -227,7 +236,7 @@ impl HTMLTextAreaElementMethods for HTMLTextAreaElement {
// if the element's dirty value flag is false, then the element's
// raw value must be set to the value of the element's textContent IDL attribute
if !self.value_changed.get() {
if !self.value_dirty.get() {
self.reset();
}
}
@ -243,19 +252,19 @@ impl HTMLTextAreaElementMethods for HTMLTextAreaElement {
// Step 1
let old_value = textinput.get_content();
let old_selection = textinput.selection_begin;
let old_selection = textinput.selection_origin;
// Step 2
textinput.set_content(value);
// Step 3
self.value_changed.set(true);
self.value_dirty.set(true);
if old_value != textinput.get_content() {
// Step 4
textinput.adjust_horizontal_to_limit(Direction::Forward, Selection::NotSelected);
textinput.clear_selection_to_limit(Direction::Forward);
} else {
textinput.selection_begin = old_selection;
textinput.selection_origin = old_selection;
}
self.upcast::<Node>().dirty(NodeDamage::OtherNodeDamage);
@ -266,6 +275,11 @@ impl HTMLTextAreaElementMethods for HTMLTextAreaElement {
self.upcast::<HTMLElement>().labels()
}
// https://html.spec.whatwg.org/multipage/#dom-textarea/input-select
fn Select(&self) {
self.dom_select(); // defined in TextControl trait
}
// https://html.spec.whatwg.org/multipage/#dom-textarea/input-selectionstart
fn GetSelectionStart(&self) -> Option<u32> {
self.get_dom_selection_start()
@ -300,6 +314,19 @@ impl HTMLTextAreaElementMethods for HTMLTextAreaElement {
fn SetSelectionRange(&self, start: u32, end: u32, direction: Option<DOMString>) -> ErrorResult {
self.set_dom_selection_range(start, end, direction)
}
// https://html.spec.whatwg.org/multipage/#dom-textarea/input-setrangetext
fn SetRangeText(&self, replacement: DOMString) -> ErrorResult {
// defined in TextControl trait
self.set_dom_range_text(replacement, None, None, Default::default())
}
// https://html.spec.whatwg.org/multipage/#dom-textarea/input-setrangetext
fn SetRangeText_(&self, replacement: DOMString, start: u32, end: u32,
selection_mode: SelectionMode) -> ErrorResult {
// defined in TextControl trait
self.set_dom_range_text(replacement, Some(start), Some(end), selection_mode)
}
}
@ -307,7 +334,7 @@ impl HTMLTextAreaElement {
pub fn reset(&self) {
// https://html.spec.whatwg.org/multipage/#the-textarea-element:concept-form-reset-control
self.SetValue(self.DefaultValue());
self.value_changed.set(false);
self.value_dirty.set(false);
}
}
@ -400,7 +427,7 @@ impl VirtualMethods for HTMLTextAreaElement {
if let Some(ref s) = self.super_type() {
s.children_changed(mutation);
}
if !self.value_changed.get() {
if !self.value_dirty.get() {
self.reset();
}
}
@ -423,7 +450,7 @@ impl VirtualMethods for HTMLTextAreaElement {
match action {
KeyReaction::TriggerDefaultAction => (),
KeyReaction::DispatchInput => {
self.value_changed.set(true);
self.value_dirty.set(true);
self.update_placeholder_shown_state();
self.upcast::<Node>().dirty(NodeDamage::OtherNodeDamage);
event.mark_as_handled();

View File

@ -3,6 +3,7 @@
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use dom::bindings::cell::DomRefCell;
use dom::bindings::codegen::Bindings::HTMLFormElementBinding::SelectionMode;
use dom::bindings::conversions::DerivedFrom;
use dom::bindings::error::{Error, ErrorResult};
use dom::bindings::str::DOMString;
@ -10,11 +11,24 @@ use dom::event::{EventBubbles, EventCancelable};
use dom::eventtarget::EventTarget;
use dom::node::{Node, NodeDamage, window_from_node};
use script_traits::ScriptToConstellationChan;
use textinput::{SelectionDirection, TextInput};
use textinput::{SelectionDirection, SelectionState, TextInput};
pub trait TextControl: DerivedFrom<EventTarget> + DerivedFrom<Node> {
fn textinput(&self) -> &DomRefCell<TextInput<ScriptToConstellationChan>>;
fn selection_api_applies(&self) -> bool;
fn has_selectable_text(&self) -> bool;
fn set_dirty_value_flag(&self, value: bool);
// https://html.spec.whatwg.org/multipage/#dom-textarea/input-select
fn dom_select(&self) {
// Step 1
if !self.has_selectable_text() {
return;
}
// Step 2
self.set_selection_range(Some(0), Some(u32::max_value()), None, None);
}
// https://html.spec.whatwg.org/multipage/#dom-textarea/input-selectionstart
fn get_dom_selection_start(&self) -> Option<u32> {
@ -45,7 +59,7 @@ pub trait TextControl: DerivedFrom<EventTarget> + DerivedFrom<Node> {
}
// Step 4
self.set_selection_range(start, Some(end), Some(self.selection_direction()));
self.set_selection_range(start, Some(end), Some(self.selection_direction()), None);
Ok(())
}
@ -68,7 +82,7 @@ pub trait TextControl: DerivedFrom<EventTarget> + DerivedFrom<Node> {
}
// Step 2
self.set_selection_range(Some(self.selection_start()), end, Some(self.selection_direction()));
self.set_selection_range(Some(self.selection_start()), end, Some(self.selection_direction()), None);
Ok(())
}
@ -93,7 +107,8 @@ pub trait TextControl: DerivedFrom<EventTarget> + DerivedFrom<Node> {
self.set_selection_range(
Some(self.selection_start()),
Some(self.selection_end()),
direction.map(|d| SelectionDirection::from(d))
direction.map(|d| SelectionDirection::from(d)),
None
);
Ok(())
}
@ -106,16 +121,125 @@ pub trait TextControl: DerivedFrom<EventTarget> + DerivedFrom<Node> {
}
// Step 2
self.set_selection_range(Some(start), Some(end), direction.map(|d| SelectionDirection::from(d)));
self.set_selection_range(Some(start), Some(end), direction.map(|d| SelectionDirection::from(d)), None);
Ok(())
}
// https://html.spec.whatwg.org/multipage/#dom-textarea/input-setrangetext
fn set_dom_range_text(&self, replacement: DOMString, start: Option<u32>, end: Option<u32>,
selection_mode: SelectionMode) -> ErrorResult {
// Step 1
if !self.selection_api_applies() {
return Err(Error::InvalidState);
}
// Step 2
self.set_dirty_value_flag(true);
// Step 3
let mut start = start.unwrap_or_else(|| self.selection_start());
let mut end = end.unwrap_or_else(|| self.selection_end());
// Step 4
if start > end {
return Err(Error::IndexSize);
}
// Save the original selection state to later pass to set_selection_range, because we will
// change the selection state in order to replace the text in the range.
let original_selection_state = self.textinput().borrow().selection_state();
let content_length = self.textinput().borrow().len() as u32;
// Step 5
if start > content_length {
start = content_length;
}
// Step 6
if end > content_length {
end = content_length;
}
// Step 7
let mut selection_start = self.selection_start();
// Step 8
let mut selection_end = self.selection_end();
// Step 11
// Must come before the textinput.replace_selection() call, as replacement gets moved in
// that call.
let new_length = replacement.len() as u32;
{
let mut textinput = self.textinput().borrow_mut();
// Steps 9-10
textinput.set_selection_range(start, end, SelectionDirection::None);
textinput.replace_selection(replacement);
}
// Step 12
let new_end = start + new_length;
// Step 13
match selection_mode {
SelectionMode::Select => {
selection_start = start;
selection_end = new_end;
},
SelectionMode::Start => {
selection_start = start;
selection_end = start;
},
SelectionMode::End => {
selection_start = new_end;
selection_end = new_end;
},
SelectionMode::Preserve => {
// Sub-step 1
let old_length = end - start;
// Sub-step 2
let delta = (new_length as isize) - (old_length as isize);
// Sub-step 3
if selection_start > end {
selection_start = ((selection_start as isize) + delta) as u32;
} else if selection_start > start {
selection_start = start;
}
// Sub-step 4
if selection_end > end {
selection_end = ((selection_end as isize) + delta) as u32;
} else if selection_end > start {
selection_end = new_end;
}
},
}
// Step 14
self.set_selection_range(
Some(selection_start),
Some(selection_end),
None,
Some(original_selection_state)
);
Ok(())
}
fn selection_start(&self) -> u32 {
self.textinput().borrow().get_selection_start()
self.textinput().borrow().selection_start_offset() as u32
}
fn selection_end(&self) -> u32 {
self.textinput().borrow().get_absolute_insertion_point() as u32
self.textinput().borrow().selection_end_offset() as u32
}
fn selection_direction(&self) -> SelectionDirection {
@ -123,7 +247,11 @@ pub trait TextControl: DerivedFrom<EventTarget> + DerivedFrom<Node> {
}
// https://html.spec.whatwg.org/multipage/#set-the-selection-range
fn set_selection_range(&self, start: Option<u32>, end: Option<u32>, direction: Option<SelectionDirection>) {
fn set_selection_range(&self, start: Option<u32>, end: Option<u32>, direction: Option<SelectionDirection>,
original_selection_state: Option<SelectionState>) {
let mut textinput = self.textinput().borrow_mut();
let original_selection_state = original_selection_state.unwrap_or_else(|| textinput.selection_state());
// Step 1
let start = start.unwrap_or(0);
@ -131,16 +259,18 @@ pub trait TextControl: DerivedFrom<EventTarget> + DerivedFrom<Node> {
let end = end.unwrap_or(0);
// Steps 3-5
self.textinput().borrow_mut().set_selection_range(start, end, direction.unwrap_or(SelectionDirection::None));
textinput.set_selection_range(start, end, direction.unwrap_or(SelectionDirection::None));
// Step 6
let window = window_from_node(self);
let _ = window.user_interaction_task_source().queue_event(
&self.upcast::<EventTarget>(),
atom!("select"),
EventBubbles::Bubbles,
EventCancelable::NotCancelable,
&window);
if textinput.selection_state() != original_selection_state {
let window = window_from_node(self);
window.user_interaction_task_source().queue_event(
&self.upcast::<EventTarget>(),
atom!("select"),
EventBubbles::Bubbles,
EventCancelable::NotCancelable,
&window);
}
self.upcast::<Node>().dirty(NodeDamage::OtherNodeDamage);
}

View File

@ -35,3 +35,11 @@ interface HTMLFormElement : HTMLElement {
//boolean checkValidity();
//boolean reportValidity();
};
// https://html.spec.whatwg.org/multipage/#selectionmode
enum SelectionMode {
"preserve", // default
"select",
"start",
"end"
};

View File

@ -89,16 +89,18 @@ interface HTMLInputElement : HTMLElement {
readonly attribute NodeList labels;
//void select();
void select();
[SetterThrows]
attribute unsigned long? selectionStart;
[SetterThrows]
attribute unsigned long? selectionEnd;
[SetterThrows]
attribute DOMString? selectionDirection;
//void setRangeText(DOMString replacement);
//void setRangeText(DOMString replacement, unsigned long start, unsigned long end,
// optional SelectionMode selectionMode = "preserve");
[Throws]
void setRangeText(DOMString replacement);
[Throws]
void setRangeText(DOMString replacement, unsigned long start, unsigned long end,
optional SelectionMode selectionMode = "preserve");
[Throws]
void setSelectionRange(unsigned long start, unsigned long end, optional DOMString direction);

View File

@ -50,16 +50,18 @@ interface HTMLTextAreaElement : HTMLElement {
readonly attribute NodeList labels;
// void select();
void select();
[SetterThrows]
attribute unsigned long? selectionStart;
[SetterThrows]
attribute unsigned long? selectionEnd;
[SetterThrows]
attribute DOMString? selectionDirection;
// void setRangeText(DOMString replacement);
// void setRangeText(DOMString replacement, unsigned long start, unsigned long end,
// optional SelectionMode selectionMode = "preserve");
[Throws]
void setRangeText(DOMString replacement);
[Throws]
void setRangeText(DOMString replacement, unsigned long start, unsigned long end,
optional SelectionMode selectionMode = "preserve");
[Throws]
void setSelectionRange(unsigned long start, unsigned long end, optional DOMString direction);
};

View File

@ -48,7 +48,7 @@ impl From<SelectionDirection> for DOMString {
}
}
#[derive(Clone, Copy, JSTraceable, MallocSizeOf, PartialEq)]
#[derive(Clone, Copy, Debug, JSTraceable, MallocSizeOf, PartialEq, PartialOrd)]
pub struct TextPoint {
/// 0-based line number
pub line: usize,
@ -56,6 +56,13 @@ pub struct TextPoint {
pub index: usize,
}
#[derive(Clone, Copy, PartialEq)]
pub struct SelectionState {
start: TextPoint,
end: TextPoint,
direction: SelectionDirection,
}
/// Encapsulated state for handling keyboard input in a single or multiline text input control.
#[derive(JSTraceable, MallocSizeOf)]
pub struct TextInput<T: ClipboardProvider> {
@ -63,8 +70,9 @@ pub struct TextInput<T: ClipboardProvider> {
lines: Vec<DOMString>,
/// Current cursor input point
pub edit_point: TextPoint,
/// Beginning of selection range with edit_point as end that can span multiple lines.
pub selection_begin: Option<TextPoint>,
/// The current selection goes from the selection_origin until the edit_point. Note that the
/// selection_origin may be after the edit_point, in the case of a backward selection.
pub selection_origin: Option<TextPoint>,
/// Is this a multiline input?
multiline: bool,
#[ignore_malloc_size_of = "Can't easily measure this generic type"]
@ -156,7 +164,7 @@ impl<T: ClipboardProvider> TextInput<T> {
let mut i = TextInput {
lines: vec!(),
edit_point: Default::default(),
selection_begin: None,
selection_origin: None,
multiline: lines == Lines::Multiple,
clipboard_provider: clipboard_provider,
max_length: max_length,
@ -169,7 +177,7 @@ impl<T: ClipboardProvider> TextInput<T> {
/// Remove a character at the current editing point
pub fn delete_char(&mut self, dir: Direction) {
if self.selection_begin.is_none() || self.selection_begin == Some(self.edit_point) {
if self.selection_origin.is_none() || self.selection_origin == Some(self.edit_point) {
self.adjust_horizontal_by_one(dir, Selection::Selected);
}
self.replace_selection(DOMString::new());
@ -182,46 +190,93 @@ impl<T: ClipboardProvider> TextInput<T> {
/// Insert a string at the current editing point
pub fn insert_string<S: Into<String>>(&mut self, s: S) {
if self.selection_begin.is_none() {
self.selection_begin = Some(self.edit_point);
if self.selection_origin.is_none() {
self.selection_origin = Some(self.edit_point);
}
self.replace_selection(DOMString::from(s.into()));
}
pub fn get_sorted_selection(&self) -> Option<(TextPoint, TextPoint)> {
self.selection_begin.map(|begin| {
let end = self.edit_point;
if begin.line < end.line || (begin.line == end.line && begin.index < end.index) {
(begin, end)
} else {
(end, begin)
}
})
/// The selection origin, or the edit point if there is no selection. Note that the selection
/// origin may be after the edit point, in the case of a backward selection.
pub fn selection_origin_or_edit_point(&self) -> TextPoint {
self.selection_origin.unwrap_or(self.edit_point)
}
// Check that the selection is valid.
fn assert_ok_selection(&self) {
if let Some(begin) = self.selection_begin {
debug_assert!(begin.line < self.lines.len());
debug_assert!(begin.index <= self.lines[begin.line].len());
/// The start of the selection (or the edit point, if there is no selection). Always less than
/// or equal to selection_end(), regardless of the selection direction.
pub fn selection_start(&self) -> TextPoint {
match self.selection_direction {
SelectionDirection::None | SelectionDirection::Forward => self.selection_origin_or_edit_point(),
SelectionDirection::Backward => self.edit_point,
}
debug_assert!(self.edit_point.line < self.lines.len());
debug_assert!(self.edit_point.index <= self.lines[self.edit_point.line].len());
}
/// The UTF-8 byte offset of the selection_start()
pub fn selection_start_offset(&self) -> usize {
self.text_point_to_offset(&self.selection_start())
}
/// The end of the selection (or the edit point, if there is no selection). Always greater
/// than or equal to selection_start(), regardless of the selection direction.
pub fn selection_end(&self) -> TextPoint {
match self.selection_direction {
SelectionDirection::None | SelectionDirection::Forward => self.edit_point,
SelectionDirection::Backward => self.selection_origin_or_edit_point(),
}
}
/// The UTF-8 byte offset of the selection_end()
pub fn selection_end_offset(&self) -> usize {
self.text_point_to_offset(&self.selection_end())
}
/// Whether or not there is an active selection (the selection may be zero-length)
#[inline]
pub fn has_selection(&self) -> bool {
self.selection_origin.is_some()
}
/// Returns a tuple of (start, end) giving the bounds of the current selection. start is always
/// less than or equal to end.
pub fn sorted_selection_bounds(&self) -> (TextPoint, TextPoint) {
(self.selection_start(), self.selection_end())
}
/// Return the selection range as UTF-8 byte offsets from the start of the content.
///
/// If there is no selection, returns an empty range at the insertion point.
pub fn get_absolute_selection_range(&self) -> Range<usize> {
match self.get_sorted_selection() {
Some((begin, end)) => self.get_absolute_point_for_text_point(&begin) ..
self.get_absolute_point_for_text_point(&end),
None => {
let insertion_point = self.get_absolute_insertion_point();
insertion_point .. insertion_point
/// If there is no selection, returns an empty range at the edit point.
pub fn sorted_selection_offsets_range(&self) -> Range<usize> {
self.selection_start_offset() .. self.selection_end_offset()
}
/// The state of the current selection. Can be used to compare whether selection state has changed.
pub fn selection_state(&self) -> SelectionState {
SelectionState {
start: self.selection_start(),
end: self.selection_end(),
direction: self.selection_direction,
}
}
// Check that the selection is valid.
fn assert_ok_selection(&self) {
if let Some(begin) = self.selection_origin {
debug_assert!(begin.line < self.lines.len());
debug_assert!(begin.index <= self.lines[begin.line].len());
match self.selection_direction {
SelectionDirection::None | SelectionDirection::Forward => {
debug_assert!(begin <= self.edit_point)
},
SelectionDirection::Backward => {
debug_assert!(self.edit_point <= begin)
},
}
}
debug_assert!(self.edit_point.line < self.lines.len());
debug_assert!(self.edit_point.index <= self.lines[self.edit_point.line].len());
}
pub fn get_selection_text(&self) -> Option<String> {
@ -242,78 +297,83 @@ impl<T: ClipboardProvider> TextInput<T> {
///
/// The accumulator `acc` can be mutated by the callback, and will be returned at the end.
fn fold_selection_slices<B, F: FnMut(&mut B, &str)>(&self, mut acc: B, mut f: F) -> B {
match self.get_sorted_selection() {
Some((begin, end)) if begin.line == end.line => {
f(&mut acc, &self.lines[begin.line][begin.index..end.index])
}
Some((begin, end)) => {
f(&mut acc, &self.lines[begin.line][begin.index..]);
for line in &self.lines[begin.line + 1 .. end.line] {
if self.has_selection() {
let (start, end) = self.sorted_selection_bounds();
if start.line == end.line {
f(&mut acc, &self.lines[start.line][start.index..end.index])
} else {
f(&mut acc, &self.lines[start.line][start.index..]);
for line in &self.lines[start.line + 1 .. end.line] {
f(&mut acc, "\n");
f(&mut acc, line);
}
f(&mut acc, "\n");
f(&mut acc, &self.lines[end.line][..end.index])
}
None => {}
}
acc
}
pub fn replace_selection(&mut self, insert: DOMString) {
if let Some((begin, end)) = self.get_sorted_selection() {
let allowed_to_insert_count = if let Some(max_length) = self.max_length {
let len_after_selection_replaced = self.utf16_len() - self.selection_utf16_len();
if len_after_selection_replaced >= max_length {
// If, after deleting the selection, the len is still greater than the max
// length, then don't delete/insert anything
return
}
max_length - len_after_selection_replaced
} else {
usize::MAX
};
let last_char_index = len_of_first_n_code_units(&*insert, allowed_to_insert_count);
let chars_to_insert = &insert[..last_char_index];
self.clear_selection();
let new_lines = {
let prefix = &self.lines[begin.line][..begin.index];
let suffix = &self.lines[end.line][end.index..];
let lines_prefix = &self.lines[..begin.line];
let lines_suffix = &self.lines[end.line + 1..];
let mut insert_lines = if self.multiline {
chars_to_insert.split('\n').map(|s| DOMString::from(s)).collect()
} else {
vec!(DOMString::from(chars_to_insert))
};
// FIXME(ajeffrey): effecient append for DOMStrings
let mut new_line = prefix.to_owned();
new_line.push_str(&insert_lines[0]);
insert_lines[0] = DOMString::from(new_line);
let last_insert_lines_index = insert_lines.len() - 1;
self.edit_point.index = insert_lines[last_insert_lines_index].len();
self.edit_point.line = begin.line + last_insert_lines_index;
// FIXME(ajeffrey): effecient append for DOMStrings
insert_lines[last_insert_lines_index].push_str(suffix);
let mut new_lines = vec!();
new_lines.extend_from_slice(lines_prefix);
new_lines.extend_from_slice(&insert_lines);
new_lines.extend_from_slice(lines_suffix);
new_lines
};
self.lines = new_lines;
if !self.has_selection() {
return
}
let (start, end) = self.sorted_selection_bounds();
let allowed_to_insert_count = if let Some(max_length) = self.max_length {
let len_after_selection_replaced = self.utf16_len() - self.selection_utf16_len();
if len_after_selection_replaced >= max_length {
// If, after deleting the selection, the len is still greater than the max
// length, then don't delete/insert anything
return
}
max_length - len_after_selection_replaced
} else {
usize::MAX
};
let last_char_index = len_of_first_n_code_units(&*insert, allowed_to_insert_count);
let chars_to_insert = &insert[..last_char_index];
self.clear_selection();
let new_lines = {
let prefix = &self.lines[start.line][..start.index];
let suffix = &self.lines[end.line][end.index..];
let lines_prefix = &self.lines[..start.line];
let lines_suffix = &self.lines[end.line + 1..];
let mut insert_lines = if self.multiline {
chars_to_insert.split('\n').map(|s| DOMString::from(s)).collect()
} else {
vec!(DOMString::from(chars_to_insert))
};
// FIXME(ajeffrey): effecient append for DOMStrings
let mut new_line = prefix.to_owned();
new_line.push_str(&insert_lines[0]);
insert_lines[0] = DOMString::from(new_line);
let last_insert_lines_index = insert_lines.len() - 1;
self.edit_point.index = insert_lines[last_insert_lines_index].len();
self.edit_point.line = start.line + last_insert_lines_index;
// FIXME(ajeffrey): effecient append for DOMStrings
insert_lines[last_insert_lines_index].push_str(suffix);
let mut new_lines = vec!();
new_lines.extend_from_slice(lines_prefix);
new_lines.extend_from_slice(&insert_lines);
new_lines.extend_from_slice(lines_suffix);
new_lines
};
self.lines = new_lines;
self.assert_ok_selection();
}
@ -330,8 +390,8 @@ impl<T: ClipboardProvider> TextInput<T> {
}
if select == Selection::Selected {
if self.selection_begin.is_none() {
self.selection_begin = Some(self.edit_point);
if self.selection_origin.is_none() {
self.selection_origin = Some(self.edit_point);
}
} else {
self.clear_selection();
@ -398,14 +458,19 @@ impl<T: ClipboardProvider> TextInput<T> {
fn adjust_selection_for_horizontal_change(&mut self, adjust: Direction, select: Selection)
-> bool {
if select == Selection::Selected {
if self.selection_begin.is_none() {
self.selection_begin = Some(self.edit_point);
if self.selection_origin.is_none() {
self.selection_origin = Some(self.edit_point);
}
self.selection_direction = match adjust {
Direction::Backward => SelectionDirection::Backward,
Direction::Forward => SelectionDirection::Forward,
};
} else {
if let Some((begin, end)) = self.get_sorted_selection() {
if self.has_selection() {
self.edit_point = match adjust {
Direction::Backward => begin,
Direction::Forward => end,
Direction::Backward => self.selection_start(),
Direction::Forward => self.selection_end(),
};
self.clear_selection();
return true
@ -451,7 +516,7 @@ impl<T: ClipboardProvider> TextInput<T> {
/// Select all text in the input control.
pub fn select_all(&mut self) {
self.selection_begin = Some(TextPoint {
self.selection_origin = Some(TextPoint {
line: 0,
index: 0,
});
@ -463,7 +528,14 @@ impl<T: ClipboardProvider> TextInput<T> {
/// Remove the current selection.
pub fn clear_selection(&mut self) {
self.selection_begin = None;
self.selection_origin = None;
self.selection_direction = SelectionDirection::None;
}
/// Remove the current selection and set the edit point to the end of the content.
pub fn clear_selection_to_limit(&mut self, direction: Direction) {
self.clear_selection();
self.adjust_horizontal_to_limit(direction, Selection::NotSelected);
}
pub fn adjust_horizontal_by_word(&mut self, direction: Direction, select: Selection) {
@ -780,17 +852,12 @@ impl<T: ClipboardProvider> TextInput<T> {
};
self.edit_point.line = min(self.edit_point.line, self.lines.len() - 1);
self.edit_point.index = min(self.edit_point.index, self.current_line_length());
self.selection_begin = None;
self.selection_origin = None;
self.assert_ok_selection();
}
/// Get the insertion point as a byte offset from the start of the content.
pub fn get_absolute_insertion_point(&self) -> usize {
self.get_absolute_point_for_text_point(&self.edit_point)
}
/// Convert a TextPoint into a byte offset from the start of the content.
pub fn get_absolute_point_for_text_point(&self, text_point: &TextPoint) -> usize {
fn text_point_to_offset(&self, text_point: &TextPoint) -> usize {
self.lines.iter().enumerate().fold(0, |acc, (i, val)| {
if i < text_point.line {
acc + val.len() + 1 // +1 for the \n
@ -801,7 +868,7 @@ impl<T: ClipboardProvider> TextInput<T> {
}
/// Convert a byte offset from the start of the content into a TextPoint.
pub fn get_text_point_for_absolute_point(&self, abs_point: usize) -> TextPoint {
fn offset_to_text_point(&self, abs_point: usize) -> TextPoint {
let mut index = abs_point;
let mut line = 0;
@ -842,28 +909,17 @@ impl<T: ClipboardProvider> TextInput<T> {
match direction {
SelectionDirection::None |
SelectionDirection::Forward => {
self.selection_begin = Some(self.get_text_point_for_absolute_point(start));
self.edit_point = self.get_text_point_for_absolute_point(end);
self.selection_origin = Some(self.offset_to_text_point(start));
self.edit_point = self.offset_to_text_point(end);
},
SelectionDirection::Backward => {
self.selection_begin = Some(self.get_text_point_for_absolute_point(end));
self.edit_point = self.get_text_point_for_absolute_point(start);
self.selection_origin = Some(self.offset_to_text_point(end));
self.edit_point = self.offset_to_text_point(start);
}
}
self.assert_ok_selection();
}
pub fn get_selection_start(&self) -> u32 {
let selection_start = match self.selection_begin {
Some(selection_begin_point) => {
self.get_absolute_point_for_text_point(&selection_begin_point)
},
None => self.get_absolute_insertion_point()
};
selection_start as u32
}
pub fn set_edit_point_index(&mut self, index: usize) {
let byte_size = self.lines[self.edit_point.line]
.graphemes(true)

View File

@ -15,6 +15,7 @@ use invalidation::media_queries::{MediaListKey, ToMediaListKey};
use malloc_size_of::MallocSizeOfOps;
use media_queries::{Device, MediaList};
use properties::ComputedValues;
use selector_parser::SnapshotMap;
use servo_arc::Arc;
use shared_lock::{Locked, StylesheetGuards, SharedRwLockReadGuard};
use std::sync::atomic::{AtomicUsize, Ordering};
@ -202,6 +203,7 @@ impl PerDocumentStyleDataImpl {
&mut self,
guard: &SharedRwLockReadGuard,
document_element: Option<E>,
snapshots: Option<&SnapshotMap>,
) -> bool
where
E: TElement,
@ -209,6 +211,7 @@ impl PerDocumentStyleDataImpl {
self.stylist.flush(
&StylesheetGuards::same(guard),
document_element,
snapshots,
)
}

View File

@ -2008,6 +2008,13 @@ extern "C" {
extern "C" {
pub fn Servo_Element_IsPrimaryStyleReusedViaRuleNode(element: RawGeckoElementBorrowed) -> bool;
}
extern "C" {
pub fn Servo_InvalidateStyleForDocStateChanges(
root: RawGeckoElementBorrowed,
sets: *const nsTArray<RawServoStyleSetBorrowed>,
aStatesChanged: u64,
);
}
extern "C" {
pub fn Servo_StyleSheet_FromUTF8Bytes(
loader: *mut Loader,
@ -2112,6 +2119,7 @@ extern "C" {
pub fn Servo_StyleSet_FlushStyleSheets(
set: RawServoStyleSetBorrowed,
doc_elem: RawGeckoElementBorrowedOrNull,
snapshots: *const ServoElementSnapshotTable,
);
}
extern "C" {

Some files were not shown because too many files have changed in this diff Show More