Bug 402611: Deal with changes to the safebrowsing v2 protocol. r=tony, a1.9=beltzner

dcamp@mozilla.com 2008-01-28 15:04:43 -08:00
parent 10c6e0c9d1
commit 7112110cef
24 changed files with 2466 additions and 695 deletions

View File

@ -554,6 +554,7 @@ pref("browser.safebrowsing.provider.0.name", "Google");
pref("browser.safebrowsing.provider.0.lookupURL", "http://sb.google.com/safebrowsing/lookup?sourceid=firefox-antiphish&features=TrustRank&client={moz:client}&appver={moz:version}&");
pref("browser.safebrowsing.provider.0.keyURL", "https://sb-ssl.google.com/safebrowsing/getkey?client={moz:client}&");
pref("browser.safebrowsing.provider.0.reportURL", "http://sb.google.com/safebrowsing/report?");
+pref("browser.safebrowsing.provider.0.gethashURL", "http://sb.google.com/safebrowsing/gethash?client={moz:client}&appver={moz:version}&pver=2.1");
// privacy policy -- Both url and fallbackurl must exist, although they may
// point to the same file. fallbackurl must be a chrome url

View File

@ -47,6 +47,7 @@
// reportURL: When shown a warning bubble, we send back the user decision
// (get me out of here/ignore warning) to this URL (strip cookies
// first). This is optional.
// gethashURL: URL for requesting complete hashes from the provider.
// reportGenericURL: HTML page for general user feedback
// reportPhishURL: HTML page for notifying the provider of a new phishing page
// reportErrorURL: HTML page for notifying the provider of a false positive
@ -110,6 +111,7 @@ PROT_DataProvider.prototype.loadDataProviderPrefs_ = function() {
this.lookupURL_ = this.getUrlPref_(basePref + "lookupURL");
this.keyURL_ = this.getUrlPref_(basePref + "keyURL");
this.reportURL_ = this.getUrlPref_(basePref + "reportURL");
this.gethashURL_ = this.getUrlPref_(basePref + "gethashURL");
// Urls to HTML report pages
this.reportGenericURL_ = this.getUrlPref_(basePref + "reportGenericURL");
@ -143,6 +145,8 @@ PROT_DataProvider.prototype.updateListManager_ = function() {
// Clear the key to stop updates.
listManager.setKeyUrl("");
}
listManager.setGethashUrl(this.getGethashURL());
}
/**
@ -206,11 +210,9 @@ PROT_DataProvider.prototype.getUpdateURL = function() {
PROT_DataProvider.prototype.getLookupURL = function() {
return this.lookupURL_;
}
PROT_DataProvider.prototype.getKeyURL = function() {
return this.keyURL_;
}
PROT_DataProvider.prototype.getReportURL = function() {
return this.reportURL_;
PROT_DataProvider.prototype.getGethashURL = function() {
return this.gethashURL_;
}
PROT_DataProvider.prototype.getReportGenericURL = function() {

View File

@ -61,16 +61,14 @@ function PROT_MalwareWarden() {
var testUpdate =
"n:1000\ni:test-malware-simple\nad:1\n" +
"a:1:" + testData.length + "\n" +
testData +
"\n";
"a:1:32:" + testData.length + "\n" +
testData;
testData = "mozilla.com/firefox/its-a-trap.html";
testUpdate +=
"n:1000\ni:test-phish-simple\nad:1\n" +
"a:1:" + testData.length + "\n" +
testData +
"\n";
"a:1:32:" + testData.length + "\n" +
testData;
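// In the 2.1 update format each chunk header also carries the hash size:
//   a:CHUNKNUM:HASHSIZE:DATALENGTH
// so "a:1:32:" above advertises 32-byte (complete) hashes for add chunk 1.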
var dbService_ = Cc["@mozilla.org/url-classifier/dbservice;1"]
.getService(Ci.nsIUrlClassifierDBService);
@ -92,7 +90,7 @@ function PROT_MalwareWarden() {
try {
dbService_.beginUpdate(listener);
dbService_.beginStream();
dbService_.beginStream("");
dbService_.updateStream(testUpdate);
dbService_.finishStream();
dbService_.finishUpdate();

View File

@ -375,6 +375,9 @@ PROT_PhishingWarden.prototype.addWebProgressToAllTabs_ = function() {
* @param url
*/
PROT_PhishingWarden.prototype.onDocNavStart = function(request, url) {
// XXX: most of this code is dead and needs to be removed.
return;
G_Debug(this, "checkRemote: " +
(this.checkRemote_ ? "yes" : "no"));

View File

@ -73,7 +73,7 @@ var safebrowsing = {
safebrowsing.malwareWarden = malwareWarden;
// Register tables
malwareWarden.registerBlackTable("goog-malware-sha128");
malwareWarden.registerBlackTable("goog-malware-shavar");
malwareWarden.maybeToggleUpdateChecking();
@ -87,7 +87,7 @@ var safebrowsing = {
// Register tables
// XXX: move table names to a pref that we originally will download
// from the provider (need to workout protocol details)
phishWarden.registerBlackTable("goog-phish-sha128");
phishWarden.registerBlackTable("goog-phish-shavar");
// Download/update lists if we're in non-enhanced mode
phishWarden.maybeToggleUpdateChecking();

View File

@ -83,6 +83,9 @@
#define NS_URLCLASSIFIERUTILS_CONTRACTID \
"@mozilla.org/url-classifier/utils;1"
#define NS_URLCLASSIFIERHASHCOMPLETER_CONTRACTID \
"@mozilla.org/url-classifier/hashcompleter;1"
#define NS_SCRIPTABLEUNESCAPEHTML_CONTRACTID "@mozilla.org/feed-unescapehtml;1"
#define NS_NAVHISTORYSERVICE_CONTRACTID \
@ -156,6 +159,11 @@
#define NS_URLCLASSIFIERUTILS_CID \
{ 0xb7b2ccec, 0x7912, 0x4ea6, { 0xa5, 0x48, 0xb0, 0x38, 0x44, 0x70, 0x04, 0xbd} }
// {786e0a0e-e035-4600-8ee0-365a63a80b80}
#define NS_URLCLASSIFIERHASHCOMPLETER_CID \
{ 0x786e0a0e, 0xe035, 0x4600, \
{ 0x8e, 0xe0, 0x36, 0x5a, 0x63, 0xa8, 0x0b, 0x80 } }
// {10f2f5f0-f103-4901-980f-ba11bd70d60d}
#define NS_SCRIPTABLEUNESCAPEHTML_CID \
{ 0x10f2f5f0, 0xf103, 0x4901, { 0x98, 0x0f, 0xba, 0x11, 0xbd, 0x70, 0xd6, 0x0d} }

View File

@ -58,6 +58,7 @@
#include "nsUrlClassifierDBService.h"
#include "nsUrlClassifierStreamUpdater.h"
#include "nsUrlClassifierUtils.h"
#include "nsUrlClassifierHashCompleter.h"
#include "nsDocShellCID.h"
#endif
@ -88,6 +89,7 @@ NS_GENERIC_FACTORY_CONSTRUCTOR(nsTypeAheadFind)
#ifdef MOZ_URL_CLASSIFIER
NS_GENERIC_FACTORY_CONSTRUCTOR(nsUrlClassifierStreamUpdater)
NS_GENERIC_FACTORY_CONSTRUCTOR_INIT(nsUrlClassifierUtils, Init)
NS_GENERIC_FACTORY_CONSTRUCTOR_INIT(nsUrlClassifierHashCompleter, Init)
static NS_IMETHODIMP
nsUrlClassifierDBServiceConstructor(nsISupports *aOuter, REFNSIID aIID,
@ -167,6 +169,10 @@ static const nsModuleComponentInfo components[] =
NS_URLCLASSIFIERUTILS_CID,
NS_URLCLASSIFIERUTILS_CONTRACTID,
nsUrlClassifierUtilsConstructor },
{ "Url Classifier Hash Completer",
NS_URLCLASSIFIERHASHCOMPLETER_CID,
NS_URLCLASSIFIERHASHCOMPLETER_CONTRACTID,
nsUrlClassifierHashCompleterConstructor },
#endif
#ifdef MOZ_FEEDS
{ "Unescape HTML",

View File

@ -70,6 +70,7 @@ function PROT_ListManager() {
this.prefs_ = new G_Preferences();
this.updateserverURL_ = null;
this.gethashURL_ = null;
this.isTesting_ = false;
@ -90,6 +91,9 @@ function PROT_ListManager() {
this.dbService_ = Cc["@mozilla.org/url-classifier/dbservice;1"]
.getService(Ci.nsIUrlClassifierDBService);
this.hashCompleter_ = Cc["@mozilla.org/url-classifier/hashcompleter;1"]
.createInstance(Ci.nsIUrlClassifierHashCompleter);
}
/**
@ -124,6 +128,17 @@ PROT_ListManager.prototype.setUpdateUrl = function(url) {
}
}
/**
* Set the gethash url.
*/
PROT_ListManager.prototype.setGethashUrl = function(url) {
G_Debug(this, "Set gethash url: " + url);
if (url != this.gethashURL_) {
this.gethashURL_ = url;
this.hashCompleter_.gethashUrl = url;
}
}
/**
* Set the crypto key url.
* @param url String
@ -146,6 +161,7 @@ PROT_ListManager.prototype.registerTable = function(tableName,
opt_requireMac) {
this.tablesData[tableName] = {};
this.tablesData[tableName].needsUpdate = false;
this.dbService_.setHashCompleter(tableName, this.hashCompleter_);
return true;
}

View File

@ -11,6 +11,7 @@ XPIDL_MODULE = url-classifier
XPIDLSRCS = nsIUrlClassifierDBService.idl \
nsIUrlClassifierStreamUpdater.idl \
nsIUrlClassifierUtils.idl \
nsIUrlClassifierHashCompleter.idl \
nsIUrlListManager.idl \
$(NULL)

View File

@ -38,6 +38,14 @@
#include "nsISupports.idl"
%{C++
#include "nsTArray.h"
class nsUrlClassifierLookupResult;
%}
[ptr] native ResultArray(nsTArray<nsUrlClassifierLookupResult>);
interface nsIUrlClassifierHashCompleter;
// Interface for JS function callbacks
[scriptable, function, uuid(4ca27b6b-a674-4b3d-ab30-d21e2da2dffb)]
interface nsIUrlClassifierCallback : nsISupports {
@ -49,13 +57,17 @@ interface nsIUrlClassifierCallback : nsISupports {
* clients streaming updates to the url-classifier (usually
* nsUrlClassifierStreamUpdater).
*/
[scriptable, uuid(113671b8-c5cc-47d9-bc57-269568c7ce29)]
[scriptable, uuid(bb0528b3-71e2-4795-8732-d60a4476e6df)]
interface nsIUrlClassifierUpdateObserver : nsISupports {
/**
* The update requested a new URL whose contents should be downloaded
* and sent to the classifier as a new stream
* and sent to the classifier as a new stream.
*
* @param url The url that was requested.
* @param table The table name that this URL's contents will be associated
* with.
*/
void updateUrlRequested(in ACString url);
void updateUrlRequested(in ACString url, in ACString table);
/* A stream update has completed */
void streamFinished();
@ -77,7 +89,7 @@ interface nsIUrlClassifierUpdateObserver : nsISupports {
* It provides async methods for querying and updating the database. As the
* methods complete, they call the callback function.
*/
[scriptable, uuid(dc3b958e-b345-458d-83f7-77e82b42a514)]
[scriptable, uuid(bcc32b18-78be-49f6-a895-a1a341a9e94b)]
interface nsIUrlClassifierDBService : nsISupports
{
/**
@ -87,12 +99,9 @@ interface nsIUrlClassifierDBService : nsISupports
* by the service.
* @param c: The callback will be called with a comma-separated list
* of tables to which the key belongs.
* @param needsProxy: Should be true if the callback needs to be called
* in the main thread, false if the callback is threadsafe.
*/
void lookup(in ACString spec,
in nsIUrlClassifierCallback c,
in boolean needsProxy);
in nsIUrlClassifierCallback c);
/**
* Lists the tables along with which chunks are available in each table.
@ -106,6 +115,14 @@ interface nsIUrlClassifierDBService : nsISupports
*/
void getTables(in nsIUrlClassifierCallback c);
/**
* Set the nsIUrlClassifierHashCompleter object for a given table. This
* object will be used to request complete versions of partial
* hashes.
*/
void setHashCompleter(in ACString tableName,
in nsIUrlClassifierHashCompleter completer);
////////////////////////////////////////////////////////////////////////////
// Incremental update methods.
//
@ -135,8 +152,11 @@ interface nsIUrlClassifierDBService : nsISupports
/**
* Begin a stream update. This should be called once per url being
* fetched.
*
* @param table The table the contents of this stream will be associated
* with, or empty for the initial stream.
*/
void beginStream();
void beginStream(in ACString table);
/**
* Update the table incrementally.
@ -184,9 +204,30 @@ interface nsIUrlClassifierDBService : nsISupports
* Interface for the actual worker thread. Implementations of this need not
* be thread aware and just work on the database.
*/
[scriptable, uuid(3ed0c8f9-a5d8-4186-beb1-5d828e95ea90)]
[scriptable, uuid(76d923e5-bbde-4292-ae35-16a67d04d524)]
interface nsIUrlClassifierDBServiceWorker : nsIUrlClassifierDBService
{
// Provide a way to forcibly close the db connection.
void closeDb();
};
/**
* This is an internal helper interface for communication between the
* main thread and the dbservice worker thread. It is called for each
* lookup to provide a set of possible results, which the main thread
* may need to expand using an nsIUrlClassifierHashCompleter.
*/
[uuid(f1dc83c6-ad43-4f0f-a809-fd43de7de8a4)]
interface nsIUrlClassifierLookupCallback : nsISupports
{
/**
* The lookup process is complete.
*
* @param results
* If this parameter is null, there were no results found.
* If not, it contains an array of nsUrlClassifierLookupResult objects
* with possible matches. The callee is responsible for freeing
* this array.
*/
void lookupComplete(in ResultArray results);
};
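With the needsProxy argument gone, a lookup from JS now passes just the URL spec and a callback. A minimal sketch of the call, assuming chrome privileges and the usual Cc/Ci shorthands (the spec is illustrative):

var dbservice = Cc["@mozilla.org/url-classifier/dbservice;1"]
                  .getService(Ci.nsIUrlClassifierDBService);
dbservice.lookup("http://example.com/page", function(tables) {
  // tables is a comma-separated list of the tables to which the key belongs.
});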

View File

@ -0,0 +1,99 @@
/* ***** BEGIN LICENSE BLOCK *****
* Version: MPL 1.1/GPL 2.0/LGPL 2.1
*
* The contents of this file are subject to the Mozilla Public License Version
* 1.1 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* http://www.mozilla.org/MPL/
*
* Software distributed under the License is distributed on an "AS IS" basis,
* WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
* for the specific language governing rights and limitations under the
* License.
*
* The Original Code is mozilla.org code.
*
* The Initial Developer of the Original Code is
* Mozilla Corporation
* Portions created by the Initial Developer are Copyright (C) 2008
* the Initial Developer. All Rights Reserved.
*
* Contributor(s):
* Dave Camp <dcamp@mozilla.com>
*
* Alternatively, the contents of this file may be used under the terms of
* either the GNU General Public License Version 2 or later (the "GPL"), or
* the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
* in which case the provisions of the GPL or the LGPL are applicable instead
* of those above. If you wish to allow use of your version of this file only
* under the terms of either the GPL or the LGPL, and not to allow others to
* use your version of this file under the terms of the MPL, indicate your
* decision by deleting the provisions above and replace them with the notice
* and other provisions required by the GPL or the LGPL. If you do not delete
* the provisions above, a recipient may use your version of this file under
* the terms of any one of the MPL, the GPL or the LGPL.
*
* ***** END LICENSE BLOCK ***** */
#include "nsISupports.idl"
/**
* This interface is implemented by nsIUrlClassifierHashCompleter clients.
*/
[scriptable, uuid(bbd6c954-7cb4-4447-bc55-8cefd1ceed89)]
interface nsIUrlClassifierHashCompleterCallback : nsISupports
{
/**
* A complete hash has been found that matches the partial hash.
* This method may be called 0-n times for a given
* nsIUrlClassifierHashCompleter::complete() call.
*
* @param hash
* The 256-bit complete hash that was discovered.
* @param table
* The name of the table that this hash belongs to.
* @param chunkId
* The database chunk that this hash belongs to.
*/
void completion(in ACString hash,
in ACString table,
in PRUint32 chunkId);
/**
* The completion request has finished. This method is called once per
* nsIUrlClassifierHashCompleter::complete() call, after all completion()
* calls are finished.
*
* @param status
* NS_OK if the request completed successfully, or an error code.
*/
void completionFinished(in nsresult status);
};
/**
* Clients updating the url-classifier database have the option of sending
* partial (32-bit) hashes of URL fragments to be blacklisted. If the
* url-classifier encounters one of these truncated hashes, it will ask
* an nsIUrlClassifierHashCompleter instance to asynchronously provide the
* complete hash, along with some associated metadata.
*/
[scriptable, uuid(1a3c19d9-ccd6-4d1a-a48a-1ab662e56e60)]
interface nsIUrlClassifierHashCompleter : nsISupports
{
/**
* Request a completed hash.
*
* @param partialHash
* The 32-bit hash encountered by the url-classifier.
* @param callback
* An nsIUrlClassifierHashCompleterCallback instance.
*/
void complete(in ACString partialHash,
in nsIUrlClassifierHashCompleterCallback callback);
/**
* The URL for the gethash request
*/
attribute ACString gethashUrl;
};
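Taken together, these two interfaces let a consumer ask the provider's gethash endpoint to expand 4-byte prefixes into full hashes. A minimal JS sketch of the intended use, assuming the contract id registered elsewhere in this patch, the usual Cc/Ci shorthands, and an illustrative gethash URL and partialHash value:

var completer = Cc["@mozilla.org/url-classifier/hashcompleter;1"]
                  .createInstance(Ci.nsIUrlClassifierHashCompleter);
completer.gethashUrl = "http://example.com/safebrowsing/gethash?pver=2.1";

// partialHash is assumed to be a 4-byte prefix previously stored by an update.
completer.complete(partialHash, {
  completion: function(hash, table, chunkId) {
    // Called once per complete hash that extends partialHash.
  },
  completionFinished: function(status) {
    // Called exactly once per complete() call, after all completion()
    // notifications, with NS_OK or an error code.
  }
});

complete() calls made before control returns to the event loop are coalesced into a single gethash request; see nsUrlClassifierHashCompleter::Complete() below.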

View File

@ -49,7 +49,7 @@ interface nsIUrlListManagerCallback : nsISupports {
};
[scriptable, uuid(874d6c95-fb8b-4f89-b36d-85fe267ab356)]
[scriptable, uuid(9c5598ec-9986-40cf-af40-b5e0d817a3a0)]
interface nsIUrlListManager : nsISupports
{
/**
@ -63,6 +63,12 @@ interface nsIUrlListManager : nsISupports
*/
void setKeyUrl(in ACString url);
/**
* Set the URL that we will query for complete hashes after a partial
* hash match.
*/
void setGethashUrl(in ACString url);
/**
* Add a table to the list of tables we are managing. The name is a
* string of the format provider_name-semantic_type-table_type. For

View File

@ -25,6 +25,7 @@ CPPSRCS = \
nsUrlClassifierDBService.cpp \
nsUrlClassifierStreamUpdater.cpp \
nsUrlClassifierUtils.cpp \
nsUrlClassifierHashCompleter.cpp \
$(NULL)
LOCAL_INCLUDES = \
@ -39,5 +40,3 @@ EXTRA_PP_COMPONENTS = nsUrlClassifierLib.js \
include $(topsrcdir)/config/rules.mk
export:: $(topsrcdir)/security/nss/lib/freebl/sha512.c
$(INSTALL) $^ .

View File

@ -43,9 +43,21 @@
#include <nsISupportsUtils.h>
#include "nsID.h"
#include "nsInterfaceHashtable.h"
#include "nsIObserver.h"
#include "nsIUrlClassifierHashCompleter.h"
#include "nsIUrlClassifierDBService.h"
#include "nsIURIClassifier.h"
#include "nsToolkitCompsCID.h"
// The hash length for a domain key.
#define DOMAIN_LENGTH 4
// The hash length of a partial hash entry.
#define PARTIAL_LENGTH 4
// The hash length of a complete hash entry.
#define COMPLETE_LENGTH 32
class nsUrlClassifierDBServiceWorker;
@ -74,17 +86,20 @@ public:
NS_DECL_NSIURICLASSIFIER
NS_DECL_NSIOBSERVER
PRBool GetCompleter(const nsACString& tableName,
nsIUrlClassifierHashCompleter** completer) {
return mCompleters.Get(tableName, completer);
}
private:
// No subclassing
~nsUrlClassifierDBService();
nsresult LookupURI(nsIURI* uri,
nsIUrlClassifierCallback* c,
PRBool needsProxy);
// Disallow copy constructor
nsUrlClassifierDBService(nsUrlClassifierDBService&);
nsresult LookupURI(nsIURI* uri, nsIUrlClassifierCallback* c);
// Make sure the event queue is initialized before we use it.
void EnsureThreadStarted();
@ -94,6 +109,8 @@ private:
nsCOMPtr<nsUrlClassifierDBServiceWorker> mWorker;
nsCOMPtr<nsUrlClassifierDBServiceWorker> mWorkerProxy;
nsInterfaceHashtable<nsCStringHashKey, nsIUrlClassifierHashCompleter> mCompleters;
// TRUE if the nsURIClassifier implementation should check for malware
// uris on document loads.
PRBool mCheckMalware;

View File

@ -0,0 +1,493 @@
//* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*-/
/* ***** BEGIN LICENSE BLOCK *****
* Version: MPL 1.1/GPL 2.0/LGPL 2.1
*
* The contents of this file are subject to the Mozilla Public License Version
* 1.1 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* http://www.mozilla.org/MPL/
*
* Software distributed under the License is distributed on an "AS IS" basis,
* WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
* for the specific language governing rights and limitations under the
* License.
*
* The Original Code is mozilla.org code.
*
* The Initial Developer of the Original Code is
* Mozilla Corporation.
* Portions created by the Initial Developer are Copyright (C) 2008
* the Initial Developer. All Rights Reserved.
*
* Contributor(s):
* Dave Camp <dcamp@mozilla.com>
*
* Alternatively, the contents of this file may be used under the terms of
* either the GNU General Public License Version 2 or later (the "GPL"), or
* the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
* in which case the provisions of the GPL or the LGPL are applicable instead
* of those above. If you wish to allow use of your version of this file only
* under the terms of either the GPL or the LGPL, and not to allow others to
* use your version of this file under the terms of the MPL, indicate your
* decision by deleting the provisions above and replace them with the notice
* and other provisions required by the GPL or the LGPL. If you do not delete
* the provisions above, a recipient may use your version of this file under
* the terms of any one of the MPL, the GPL or the LGPL.
*
* ***** END LICENSE BLOCK ***** */
#include "nsUrlClassifierHashCompleter.h"
#include "nsIChannel.h"
#include "nsIHttpChannel.h"
#include "nsIObserverService.h"
#include "nsIUploadChannel.h"
#include "nsNetUtil.h"
#include "nsStreamUtils.h"
#include "nsStringStream.h"
#include "nsServiceManagerUtils.h"
#include "nsThreadUtils.h"
#include "nsUrlClassifierDBService.h"
#include "prlog.h"
#include "prprf.h"
// NSPR_LOG_MODULES=UrlClassifierHashCompleter:5
#if defined(PR_LOGGING)
static const PRLogModuleInfo *gUrlClassifierHashCompleterLog = nsnull;
#define LOG(args) PR_LOG(gUrlClassifierHashCompleterLog, PR_LOG_DEBUG, args)
#define LOG_ENABLED() PR_LOG_TEST(gUrlClassifierHashCompleterLog, 4)
#else
#define LOG(args)
#define LOG_ENABLED() (PR_FALSE)
#endif
NS_IMPL_ISUPPORTS3(nsUrlClassifierHashCompleterRequest,
nsIRequestObserver,
nsIStreamListener,
nsIObserver)
nsresult
nsUrlClassifierHashCompleterRequest::Begin()
{
LOG(("nsUrlClassifierHashCompleterRequest::Begin [%p]", this));
nsCOMPtr<nsIObserverService> observerService =
do_GetService("@mozilla.org/observer-service;1");
if (observerService)
observerService->AddObserver(this, NS_XPCOM_SHUTDOWN_OBSERVER_ID, PR_FALSE);
nsresult rv = OpenChannel();
if (NS_FAILED(rv)) {
NotifyFailure(rv);
return rv;
}
return NS_OK;
}
nsresult
nsUrlClassifierHashCompleterRequest::Add(const nsACString& partialHash,
nsIUrlClassifierHashCompleterCallback *c)
{
LOG(("nsUrlClassifierHashCompleterRequest::Add [%p]", this));
Request *request = mRequests.AppendElement();
if (!request)
return NS_ERROR_OUT_OF_MEMORY;
request->partialHash = partialHash;
request->callback = c;
return NS_OK;
}
nsresult
nsUrlClassifierHashCompleterRequest::OpenChannel()
{
LOG(("nsUrlClassifierHashCompleterRequest::OpenChannel [%p]", this));
nsresult rv;
rv = NS_NewChannel(getter_AddRefs(mChannel), mURI);
NS_ENSURE_SUCCESS(rv, rv);
nsCAutoString requestBody;
rv = BuildRequest(requestBody);
NS_ENSURE_SUCCESS(rv, rv);
rv = AddRequestBody(requestBody);
NS_ENSURE_SUCCESS(rv, rv);
rv = mChannel->AsyncOpen(this, nsnull);
NS_ENSURE_SUCCESS(rv, rv);
return NS_OK;
}
nsresult
nsUrlClassifierHashCompleterRequest::BuildRequest(nsCAutoString &aRequestBody)
{
LOG(("nsUrlClassifierHashCompleterRequest::BuildRequest [%p]", this));
nsCAutoString body;
for (PRUint32 i = 0; i < mRequests.Length(); i++) {
Request &request = mRequests[i];
body.Append(request.partialHash);
}
aRequestBody.AppendInt(PARTIAL_LENGTH);
aRequestBody.Append(':');
aRequestBody.AppendInt(body.Length());
aRequestBody.Append('\n');
aRequestBody.Append(body);
return NS_OK;
}
nsresult
nsUrlClassifierHashCompleterRequest::AddRequestBody(const nsACString &aRequestBody)
{
LOG(("nsUrlClassifierHashCompleterRequest::AddRequestBody [%p]", this));
nsresult rv;
nsCOMPtr<nsIStringInputStream> strStream =
do_CreateInstance(NS_STRINGINPUTSTREAM_CONTRACTID, &rv);
NS_ENSURE_SUCCESS(rv, rv);
rv = strStream->SetData(aRequestBody.BeginReading(),
aRequestBody.Length());
NS_ENSURE_SUCCESS(rv, rv);
nsCOMPtr<nsIUploadChannel> uploadChannel = do_QueryInterface(mChannel, &rv);
NS_ENSURE_SUCCESS(rv, rv);
rv = uploadChannel->SetUploadStream(strStream,
NS_LITERAL_CSTRING("text/plain"),
-1);
NS_ENSURE_SUCCESS(rv, rv);
nsCOMPtr<nsIHttpChannel> httpChannel = do_QueryInterface(mChannel, &rv);
NS_ENSURE_SUCCESS(rv, rv);
rv = httpChannel->SetRequestMethod(NS_LITERAL_CSTRING("POST"));
NS_ENSURE_SUCCESS(rv, rv);
return NS_OK;
}
nsresult
nsUrlClassifierHashCompleterRequest::HandleItem(const nsACString& item,
const nsACString& tableName,
PRUint32 chunkId)
{
// If this item matches any of the requested partial hashes, add them
// to the response.
for (PRUint32 i = 0; i < mRequests.Length(); i++) {
Request &request = mRequests[i];
if (StringBeginsWith(item, request.partialHash)) {
Response *response = request.responses.AppendElement();
if (!response)
return NS_ERROR_OUT_OF_MEMORY;
response->completeHash = item;
response->tableName = tableName;
response->chunkId = chunkId;
}
}
return NS_OK;
}
/**
* Reads one table of results from the response. Leaves begin pointing at the
* next table.
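*
* Each table block in the response has the form "TABLENAME:CHUNKID:DATALEN\n"
* followed by DATALEN bytes of 32-byte complete hashes.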
*/
nsresult
nsUrlClassifierHashCompleterRequest::HandleTable(const nsACString& response,
nsACString::const_iterator& begin)
{
nsACString::const_iterator iter, end;
iter = begin;
response.EndReading(end);
if (!FindCharInReadable(':', iter, end)) {
// No table line.
NS_WARNING("Received badly-formatted gethash response.");
return NS_ERROR_FAILURE;
}
const nsCSubstring& tableName = Substring(begin, iter);
iter++;
begin = iter;
if (!FindCharInReadable('\n', iter, end)) {
// Unterminated header line.
NS_WARNING("Received badly-formatted gethash response.");
return NS_ERROR_FAILURE;
}
const nsCSubstring& remaining = Substring(begin, iter);
iter++;
begin = iter;
PRUint32 chunkId;
PRInt32 size;
if (PR_sscanf(PromiseFlatCString(remaining).get(),
"%u:%d", &chunkId, &size) != 2) {
NS_WARNING("Received badly-formatted gethash response.");
return NS_ERROR_FAILURE;
}
if (size % COMPLETE_LENGTH != 0) {
NS_WARNING("Unexpected gethash response length");
return NS_ERROR_FAILURE;
}
// begin now refers to the hash data.
if (begin.size_forward() < size) {
NS_WARNING("Response does not match the expected response length.");
return NS_ERROR_FAILURE;
}
for (PRInt32 i = 0; i < (size / COMPLETE_LENGTH); i++) {
// Read the complete hash.
iter.advance(COMPLETE_LENGTH);
nsresult rv = HandleItem(Substring(begin, iter), tableName, chunkId);
NS_ENSURE_SUCCESS(rv, rv);
begin = iter;
}
// begin now points at the end of the hash data.
return NS_OK;
}
nsresult
nsUrlClassifierHashCompleterRequest::HandleResponse()
{
if (mResponse.IsEmpty()) {
// Empty response, we're done.
return NS_OK;
}
nsCString::const_iterator begin, end;
mResponse.BeginReading(begin);
mResponse.EndReading(end);
while (begin != end) {
nsresult rv = HandleTable(mResponse, begin);
NS_ENSURE_SUCCESS(rv, rv);
}
return NS_OK;
}
void
nsUrlClassifierHashCompleterRequest::NotifySuccess()
{
LOG(("nsUrlClassifierHashCompleterRequest::NotifySuccess [%p]", this));
for (PRUint32 i = 0; i < mRequests.Length(); i++) {
Request &request = mRequests[i];
for (PRUint32 j = 0; j < request.responses.Length(); j++) {
Response &response = request.responses[j];
request.callback->Completion(response.completeHash,
response.tableName,
response.chunkId);
}
request.callback->CompletionFinished(NS_OK);
}
}
void
nsUrlClassifierHashCompleterRequest::NotifyFailure(nsresult status)
{
LOG(("nsUrlClassifierHashCompleterRequest::NotifyFailure [%p]", this));
for (PRUint32 i = 0; i < mRequests.Length(); i++) {
Request &request = mRequests[i];
request.callback->CompletionFinished(status);
}
}
NS_IMETHODIMP
nsUrlClassifierHashCompleterRequest::OnStartRequest(nsIRequest *request,
nsISupports *context)
{
LOG(("nsUrlClassifierHashCompleter::OnStartRequest [%p]", this));
return NS_OK;
}
NS_IMETHODIMP
nsUrlClassifierHashCompleterRequest::OnDataAvailable(nsIRequest *request,
nsISupports *context,
nsIInputStream *stream,
PRUint32 sourceOffset,
PRUint32 length)
{
LOG(("nsUrlClassifierHashCompleter::OnDataAvailable [%p]", this));
if (mShuttingDown)
return NS_ERROR_ABORT;
nsCAutoString piece;
nsresult rv = NS_ConsumeStream(stream, length, piece);
NS_ENSURE_SUCCESS(rv, rv);
mResponse.Append(piece);
return NS_OK;
}
NS_IMETHODIMP
nsUrlClassifierHashCompleterRequest::OnStopRequest(nsIRequest *request,
nsISupports *context,
nsresult status)
{
LOG(("nsUrlClassifierHashCompleter::OnStopRequest [%p, status=%d]",
this, status));
nsCOMPtr<nsIObserverService> observerService =
do_GetService("@mozilla.org/observer-service;1");
if (observerService)
observerService->RemoveObserver(this, NS_XPCOM_SHUTDOWN_OBSERVER_ID);
if (mShuttingDown)
return NS_ERROR_ABORT;
if (NS_SUCCEEDED(status)) {
nsCOMPtr<nsIHttpChannel> channel = do_QueryInterface(request);
if (channel) {
PRBool success;
status = channel->GetRequestSucceeded(&success);
if (NS_SUCCEEDED(status) && !success) {
status = NS_ERROR_ABORT;
}
}
}
if (NS_SUCCEEDED(status))
status = HandleResponse();
if (NS_SUCCEEDED(status))
NotifySuccess();
else
NotifyFailure(status);
mChannel = nsnull;
return NS_OK;
}
NS_IMETHODIMP
nsUrlClassifierHashCompleterRequest::Observe(nsISupports *subject,
const char *topic,
const PRUnichar *data)
{
if (!strcmp(topic, NS_XPCOM_SHUTDOWN_OBSERVER_ID)) {
mShuttingDown = PR_TRUE;
if (mChannel)
mChannel->Cancel(NS_ERROR_ABORT);
}
return NS_OK;
}
NS_IMPL_ISUPPORTS4(nsUrlClassifierHashCompleter,
nsIUrlClassifierHashCompleter,
nsIRunnable,
nsIObserver,
nsISupportsWeakReference)
nsresult
nsUrlClassifierHashCompleter::Init()
{
#if defined(PR_LOGGING)
if (!gUrlClassifierHashCompleterLog)
gUrlClassifierHashCompleterLog = PR_NewLogModule("UrlClassifierHashCompleter");
#endif
nsCOMPtr<nsIObserverService> observerService =
do_GetService("@mozilla.org/observer-service;1");
if (observerService)
observerService->AddObserver(this, NS_XPCOM_SHUTDOWN_OBSERVER_ID, PR_TRUE);
return NS_OK;
}
NS_IMETHODIMP
nsUrlClassifierHashCompleter::Complete(const nsACString &partialHash,
nsIUrlClassifierHashCompleterCallback *c)
{
LOG(("nsUrlClassifierHashCompleter::Complete [%p]", this));
if (mShuttingDown)
return NS_ERROR_NOT_INITIALIZED;
if (!mURI) {
NS_WARNING("Trying to use nsUrlClassifierHashCompleter without setting the gethash URI.");
return NS_ERROR_NOT_INITIALIZED;
}
// We batch all of the requested completions into a single request, which is
// sent the next time we reach the main event loop.
if (!mRequest) {
mRequest = new nsUrlClassifierHashCompleterRequest(mURI);
if (!mRequest) {
return NS_ERROR_OUT_OF_MEMORY;
}
// Schedule ourselves to start this request on the main loop.
NS_DispatchToCurrentThread(this);
}
return mRequest->Add(partialHash, c);
}
NS_IMETHODIMP
nsUrlClassifierHashCompleter::SetGethashUrl(const nsACString &url)
{
return NS_NewURI(getter_AddRefs(mURI), url);
}
NS_IMETHODIMP
nsUrlClassifierHashCompleter::GetGethashUrl(nsACString &url)
{
url.Truncate();
if (mURI) {
return mURI->GetSpec(url);
}
return NS_OK;
}
NS_IMETHODIMP
nsUrlClassifierHashCompleter::Run()
{
LOG(("nsUrlClassifierHashCompleter::Run [%p]\n", this));
if (mShuttingDown) {
mRequest = nsnull;
return NS_ERROR_NOT_INITIALIZED;
}
if (!mRequest)
return NS_OK;
// Dispatch the http request.
nsresult rv = mRequest->Begin();
mRequest = nsnull;
return rv;
}
NS_IMETHODIMP
nsUrlClassifierHashCompleter::Observe(nsISupports *subject, const char *topic,
const PRUnichar *data)
{
if (!strcmp(topic, NS_XPCOM_SHUTDOWN_OBSERVER_ID)) {
mShuttingDown = PR_TRUE;
}
return NS_OK;
}

View File

@ -0,0 +1,129 @@
//* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*-/
/* ***** BEGIN LICENSE BLOCK *****
* Version: MPL 1.1/GPL 2.0/LGPL 2.1
*
* The contents of this file are subject to the Mozilla Public License Version
* 1.1 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* http://www.mozilla.org/MPL/
*
* Software distributed under the License is distributed on an "AS IS" basis,
* WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
* for the specific language governing rights and limitations under the
* License.
*
* The Original Code is mozilla.org code.
*
* The Initial Developer of the Original Code is
* Mozilla Corporation.
* Portions created by the Initial Developer are Copyright (C) 2008
* the Initial Developer. All Rights Reserved.
*
* Contributor(s):
* Dave Camp <dcamp@mozilla.com>
*
* Alternatively, the contents of this file may be used under the terms of
* either the GNU General Public License Version 2 or later (the "GPL"), or
* the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
* in which case the provisions of the GPL or the LGPL are applicable instead
* of those above. If you wish to allow use of your version of this file only
* under the terms of either the GPL or the LGPL, and not to allow others to
* use your version of this file under the terms of the MPL, indicate your
* decision by deleting the provisions above and replace them with the notice
* and other provisions required by the GPL or the LGPL. If you do not delete
* the provisions above, a recipient may use your version of this file under
* the terms of any one of the MPL, the GPL or the LGPL.
*
* ***** END LICENSE BLOCK ***** */
#ifndef nsUrlClassifierHashCompleter_h_
#define nsUrlClassifierHashCompleter_h_
#include "nsIUrlClassifierHashCompleter.h"
#include "nsAutoPtr.h"
#include "nsCOMPtr.h"
#include "nsIChannel.h"
#include "nsIObserver.h"
#include "nsIRunnable.h"
#include "nsIStreamListener.h"
#include "nsIURI.h"
#include "nsTArray.h"
#include "nsString.h"
#include "nsWeakReference.h"
class nsUrlClassifierHashCompleterRequest : public nsIStreamListener
, public nsIObserver
{
public:
NS_DECL_ISUPPORTS
NS_DECL_NSIREQUESTOBSERVER
NS_DECL_NSISTREAMLISTENER
NS_DECL_NSIOBSERVER
nsUrlClassifierHashCompleterRequest(nsIURI *uri)
: mShuttingDown(PR_FALSE)
, mURI(uri) { }
~nsUrlClassifierHashCompleterRequest() { }
nsresult Begin();
nsresult Add(const nsACString &partialHash,
nsIUrlClassifierHashCompleterCallback *c);
private:
nsresult OpenChannel();
nsresult BuildRequest(nsCAutoString &request);
nsresult AddRequestBody(const nsACString &requestBody);
nsresult HandleItem(const nsACString &item,
const nsACString &table,
PRUint32 chunkId);
nsresult HandleTable(const nsACString &response,
nsACString::const_iterator &begin);
nsresult HandleResponse();
void NotifySuccess();
void NotifyFailure(nsresult status);
PRBool mShuttingDown;
nsCOMPtr<nsIURI> mURI;
nsCOMPtr<nsIChannel> mChannel;
nsCString mResponse;
struct Response {
nsCString completeHash;
nsCString tableName;
PRUint32 chunkId;
};
struct Request {
nsCString partialHash;
nsTArray<Response> responses;
nsCOMPtr<nsIUrlClassifierHashCompleterCallback> callback;
};
nsTArray<Request> mRequests;
};
class nsUrlClassifierHashCompleter : public nsIUrlClassifierHashCompleter
, public nsIRunnable
, public nsIObserver
, public nsSupportsWeakReference
{
public:
NS_DECL_ISUPPORTS
NS_DECL_NSIURLCLASSIFIERHASHCOMPLETER
NS_DECL_NSIRUNNABLE
NS_DECL_NSIOBSERVER
nsUrlClassifierHashCompleter() : mShuttingDown(PR_FALSE) {}
~nsUrlClassifierHashCompleter() {}
nsresult Init();
private:
nsRefPtr<nsUrlClassifierHashCompleterRequest> mRequest;
nsCOMPtr<nsIURI> mURI;
PRBool mShuttingDown;
};
#endif // nsUrlClassifierHashCompleter_h_

View File

@ -94,7 +94,7 @@ nsUrlClassifierStreamUpdater::DownloadDone()
LOG(("nsUrlClassifierStreamUpdater::DownloadDone [this=%p]", this));
mIsUpdating = PR_FALSE;
mPendingUpdateUrls.Clear();
mPendingUpdates.Clear();
mSuccessCallback = nsnull;
mUpdateErrorCallback = nsnull;
mDownloadErrorCallback = nsnull;
@ -127,7 +127,8 @@ nsUrlClassifierStreamUpdater::SetUpdateUrl(const nsACString & aUpdateUrl)
nsresult
nsUrlClassifierStreamUpdater::FetchUpdate(nsIURI *aUpdateUrl,
const nsACString & aRequestBody)
const nsACString & aRequestBody,
const nsACString & aStreamTable)
{
nsresult rv;
rv = NS_NewChannel(getter_AddRefs(mChannel), aUpdateUrl, nsnull, nsnull, this);
@ -142,18 +143,23 @@ nsUrlClassifierStreamUpdater::FetchUpdate(nsIURI *aUpdateUrl,
rv = mChannel->AsyncOpen(this, nsnull);
NS_ENSURE_SUCCESS(rv, rv);
mStreamTable = aStreamTable;
return NS_OK;
}
nsresult
nsUrlClassifierStreamUpdater::FetchUpdate(const nsACString & aUpdateUrl,
const nsACString & aRequestBody)
const nsACString & aRequestBody,
const nsACString & aStreamTable)
{
nsCOMPtr<nsIURI> uri;
nsresult rv = NS_NewURI(getter_AddRefs(uri), aUpdateUrl);
NS_ENSURE_SUCCESS(rv, rv);
return FetchUpdate(uri, aRequestBody);
LOG(("Fetching update from %s\n", PromiseFlatCString(aUpdateUrl).get()));
return FetchUpdate(uri, aRequestBody, aStreamTable);
}
NS_IMETHODIMP
@ -215,23 +221,29 @@ nsUrlClassifierStreamUpdater::DownloadUpdates(
*_retval = PR_TRUE;
return FetchUpdate(mUpdateUrl, aRequestBody);
return FetchUpdate(mUpdateUrl, aRequestBody, EmptyCString());
}
///////////////////////////////////////////////////////////////////////////////
// nsIUrlClassifierUpdateObserver implementation
NS_IMETHODIMP
nsUrlClassifierStreamUpdater::UpdateUrlRequested(const nsACString &aUrl)
nsUrlClassifierStreamUpdater::UpdateUrlRequested(const nsACString &aUrl,
const nsACString &aTable)
{
LOG(("Queuing requested update from %s\n", PromiseFlatCString(aUrl).get()));
PendingUpdate *update = mPendingUpdates.AppendElement();
if (!update)
return NS_ERROR_OUT_OF_MEMORY;
// Allow data: urls for unit testing purposes, otherwise assume http
if (StringBeginsWith(aUrl, NS_LITERAL_CSTRING("data:"))) {
mPendingUpdateUrls.AppendElement(aUrl);
update->mUrl = aUrl;
} else {
mPendingUpdateUrls.AppendElement(NS_LITERAL_CSTRING("http://") + aUrl);
update->mUrl = NS_LITERAL_CSTRING("http://") + aUrl;
}
update->mTable = aTable;
return NS_OK;
}
@ -242,15 +254,16 @@ nsUrlClassifierStreamUpdater::StreamFinished()
nsresult rv;
// Pop off a pending URL and update it.
if (mPendingUpdateUrls.Length() > 0) {
rv = FetchUpdate(mPendingUpdateUrls[0], NS_LITERAL_CSTRING(""));
if (mPendingUpdates.Length() > 0) {
PendingUpdate &update = mPendingUpdates[0];
rv = FetchUpdate(update.mUrl, EmptyCString(), update.mTable);
if (NS_FAILED(rv)) {
LOG(("Error fetching update url: %s\n", mPendingUpdateUrls[0].get()));
LOG(("Error fetching update url: %s\n", update.mUrl.get()));
mDBService->CancelUpdate();
return rv;
}
mPendingUpdateUrls.RemoveElementAt(0);
mPendingUpdates.RemoveElementAt(0);
} else {
mDBService->FinishUpdate();
}
@ -262,7 +275,7 @@ NS_IMETHODIMP
nsUrlClassifierStreamUpdater::UpdateSuccess(PRUint32 requestedTimeout)
{
LOG(("nsUrlClassifierStreamUpdater::UpdateSuccess [this=%p]", this));
NS_ASSERTION(mPendingUpdateUrls.Length() == 0,
NS_ASSERTION(mPendingUpdates.Length() == 0,
"Didn't fetch all update URLs.");
// DownloadDone() clears mSuccessCallback, so we save it off here.
@ -330,12 +343,14 @@ nsUrlClassifierStreamUpdater::AddRequestBody(const nsACString &aRequestBody)
// nsIStreamListenerObserver implementation
NS_IMETHODIMP
nsUrlClassifierStreamUpdater::OnStartRequest(nsIRequest *request, nsISupports* context)
nsUrlClassifierStreamUpdater::OnStartRequest(nsIRequest *request,
nsISupports* context)
{
nsresult rv;
rv = mDBService->BeginStream();
rv = mDBService->BeginStream(mStreamTable);
NS_ENSURE_SUCCESS(rv, rv);
mStreamTable.Truncate();
nsCOMPtr<nsIHttpChannel> httpChannel = do_QueryInterface(request);
if (httpChannel) {

View File

@ -87,16 +87,25 @@ private:
nsresult AddRequestBody(const nsACString &aRequestBody);
nsresult FetchUpdate(nsIURI *aURI, const nsACString &aRequestBody);
nsresult FetchUpdate(const nsACString &aURI, const nsACString &aRequestBody);
nsresult FetchUpdate(nsIURI *aURI,
const nsACString &aRequestBody,
const nsACString &aTable);
nsresult FetchUpdate(const nsACString &aURI,
const nsACString &aRequestBody,
const nsACString &aTable);
PRBool mIsUpdating;
PRBool mInitialized;
nsCOMPtr<nsIURI> mUpdateUrl;
nsCString mStreamTable;
nsCOMPtr<nsIChannel> mChannel;
nsCOMPtr<nsIUrlClassifierDBService> mDBService;
nsTArray<nsCAutoString> mPendingUpdateUrls;
struct PendingUpdate {
nsCString mUrl;
nsCString mTable;
};
nsTArray<PendingUpdate> mPendingUpdates;
nsCOMPtr<nsIUrlClassifierCallback> mSuccessCallback;
nsCOMPtr<nsIUrlClassifierCallback> mUpdateErrorCallback;

View File

@ -65,17 +65,21 @@ var streamUpdater = Cc["@mozilla.org/url-classifier/streamupdater;1"]
* }
*/
function buildUpdate(update) {
function buildUpdate(update, hashSize) {
if (!hashSize) {
hashSize = 32;
}
var updateStr = "n:1000\n";
for (var tableName in update) {
updateStr += "i:" + tableName + "\n";
if (tableName != "")
updateStr += "i:" + tableName + "\n";
var chunks = update[tableName];
for (var j = 0; j < chunks.length; j++) {
var chunk = chunks[j];
var chunkType = chunk.chunkType ? chunk.chunkType : 'a';
var chunkNum = chunk.chunkNum ? chunk.chunkNum : j;
updateStr += chunkType + ':' + chunkNum;
updateStr += chunkType + ':' + chunkNum + ':' + hashSize;
if (chunk.urls) {
var chunkData = chunk.urls.join("\n");
@ -89,8 +93,12 @@ function buildUpdate(update) {
return updateStr;
}
function buildPhishingUpdate(chunks) {
return buildUpdate({"test-phish-simple" : chunks});
function buildPhishingUpdate(chunks, hashSize) {
return buildUpdate({"test-phish-simple" : chunks}, hashSize);
}
function buildBareUpdate(chunks, hashSize) {
return buildUpdate({"" : chunks}, hashSize);
}
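// For example, buildPhishingUpdate([{ "chunkNum" : 1, "urls" : ["foo.com/a"] }], 4)
// produces an update of the form (assuming the unchanged tail of the chunk
// loop still appends ":" + data.length + "\n" + data):
//   n:1000
//   i:test-phish-simple
//   a:1:4:9
//   foo.com/a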
/**
@ -113,7 +121,7 @@ function doSimpleUpdate(updateText, success, failure) {
};
dbservice.beginUpdate(listener);
dbservice.beginStream();
dbservice.beginStream("");
dbservice.updateStream(updateText);
dbservice.finishStream();
dbservice.finishUpdate();
@ -152,6 +160,8 @@ tableData : function(expectedTables, cb)
checkUrls: function(urls, expected, cb)
{
// work with a copy of the list.
urls = urls.slice(0);
var doLookup = function() {
if (urls.length > 0) {
var fragment = urls.shift();
@ -177,6 +187,11 @@ urlsExist: function(urls, cb)
this.checkUrls(urls, 'test-phish-simple', cb);
},
malwareUrlsExist: function(urls, cb)
{
this.checkUrls(urls, 'test-malware-simple', cb);
},
subsDontExist: function(urls, cb)
{
// XXX: there's no interface for checking items in the subs table
@ -217,8 +232,6 @@ function updateError(arg)
// Runs a set of updates, and then checks a set of assertions.
function doUpdateTest(updates, assertions, successCallback, errorCallback) {
dbservice.resetDatabase();
var runUpdate = function() {
if (updates.length > 0) {
var update = updates.shift();
@ -241,6 +254,10 @@ function runNextTest()
return;
}
dbservice.resetDatabase();
dbservice.setHashCompleter('test-phish-simple', null);
dumpn("running " + gTests[gNextTest]);
gTests[gNextTest++]();
}
@ -250,4 +267,22 @@ function runTests(tests)
runNextTest();
}
function Timer(delay, cb) {
this.cb = cb;
var timer = Cc["@mozilla.org/timer;1"].createInstance(Ci.nsITimer);
timer.initWithCallback(this, delay, timer.TYPE_ONE_SHOT);
}
Timer.prototype = {
QueryInterface: function(iid) {
if (!iid.equals(Ci.nsISupports) && !iid.equals(Ci.nsITimerCallback)) {
throw Cr.NS_ERROR_NO_INTERFACE;
}
return this;
},
notify: function(timer) {
this.cb();
}
}
cleanUp();

View File

@ -44,19 +44,19 @@ function testMultipleAdds() {
function testSimpleSub()
{
var addUrls = ["foo.com/a", "bar.com/b"];
var subUrls = ["foo.com/a"];
var subUrls = ["1:foo.com/a"];
var addUpdate = buildPhishingUpdate(
[{ "chunkNum" : 1, // adds and subtracts don't share a chunk numbering space
"urls": addUrls }]);
var subUpdate = buildPhishingUpdate(
[{ "chunkNum" : 1,
[{ "chunkNum" : 50,
"chunkType" : "s",
"urls": subUrls }]);
var assertions = {
"tableData" : "test-phish-simple;a:1:s:1",
"tableData" : "test-phish-simple;a:1:s:50",
"urlsExist" : [ "bar.com/b" ],
"urlsDontExist": ["foo.com/a" ],
"subsDontExist" : [ "foo.com/a" ]
@ -69,20 +69,20 @@ function testSimpleSub()
// Same as testSimpleSub(), but the sub comes in before the add.
function testSubEmptiesAdd()
{
var subUrls = ["foo.com/a"];
var subUrls = ["1:foo.com/a"];
var addUrls = ["foo.com/a", "bar.com/b"];
var subUpdate = buildPhishingUpdate(
[{ "chunkNum" : 1,
[{ "chunkNum" : 50,
"chunkType" : "s",
"urls": subUrls }]);
var addUpdate = buildPhishingUpdate(
[{ "chunkNum" : 1, // adds and subtracts don't share a chunk numbering space
[{ "chunkNum" : 1,
"urls": addUrls }]);
var assertions = {
"tableData" : "test-phish-simple;a:1:s:1",
"tableData" : "test-phish-simple;a:1:s:50",
"urlsExist" : [ "bar.com/b" ],
"urlsDontExist": ["foo.com/a" ],
"subsDontExist" : [ "foo.com/a" ] // this sub was found, it shouldn't exist anymore
@ -95,7 +95,7 @@ function testSubEmptiesAdd()
// still have an item left over that needs to be synced.
function testSubPartiallyEmptiesAdd()
{
var subUrls = ["foo.com/a"];
var subUrls = ["1:foo.com/a"];
var addUrls = ["foo.com/a", "foo.com/b", "bar.com/b"];
var subUpdate = buildPhishingUpdate(
@ -123,7 +123,7 @@ function testSubPartiallyEmptiesAdd()
// then adding it twice should leave the url intact.
function testPendingSubRemoved()
{
var subUrls = ["foo.com/a", "foo.com/b"];
var subUrls = ["1:foo.com/a", "2:foo.com/b"];
var addUrls = ["foo.com/a", "foo.com/b"];
var subUpdate = buildPhishingUpdate(
@ -151,7 +151,7 @@ function testPendingSubRemoved()
// Make sure that a saved sub is removed when the sub chunk is expired.
function testPendingSubExpire()
{
var subUrls = ["foo.com/a", "foo.com/b"];
var subUrls = ["1:foo.com/a", "1:foo.com/b"];
var addUrls = ["foo.com/a", "foo.com/b"];
var subUpdate = buildPhishingUpdate(
@ -176,7 +176,7 @@ function testPendingSubExpire()
doTest([subUpdate, expireUpdate, addUpdate], assertions);
}
// Two adds plus one sub of the same URL will leave one of the adds there
// Make sure that the sub url only removes the add from the chunk it specifies
function testDuplicateAdds()
{
var urls = ["foo.com/a"];
@ -190,7 +190,7 @@ function testDuplicateAdds()
var subUpdate = buildPhishingUpdate(
[{ "chunkNum" : 3,
"chunkType" : "s",
"urls": urls }]);
"urls": ["2:foo.com/a"]}]);
var assertions = {
"tableData" : "test-phish-simple;a:1-2:s:3",
@ -204,17 +204,17 @@ function testDuplicateAdds()
// Tests a sub which matches some existing adds but leaves others.
function testSubPartiallyMatches()
{
var addUrls = ["foo.com/a"];
var subUrls = ["foo.com/a", "foo.com/b"];
var subUrls = ["foo.com/a"];
var addUrls = ["1:foo.com/a", "2:foo.com/b"];
var addUpdate = buildPhishingUpdate(
[{ "chunkNum" : 1,
"chunkType" : "s",
"urls" : addUrls }]);
var subUpdate = buildPhishingUpdate(
[{ "chunkNum" : 1,
"urls" : subUrls }]);
"chunkType" : "s",
"urls" : addUrls }]);
var assertions = {
"tableData" : "test-phish-simple;a:1:s:1",
@ -232,7 +232,7 @@ function testSubPartiallyMatches()
function testSubPartiallyMatches2()
{
var addUrls = ["foo.com/a"];
var subUrls = ["foo.com/a", "foo.com/b"];
var subUrls = ["1:foo.com/a", "2:foo.com/b"];
var addUrls2 = ["foo.com/b"];
var addUpdate = buildPhishingUpdate(
@ -258,11 +258,10 @@ function testSubPartiallyMatches2()
}
// Verify that two subs for the same domain but from different chunks
// match (tests that existing sub entries are properly updated, and
// helps exercise nsUrlClassifierEntry::RemoveFragments().
// match (tests that existing sub entries are properly updated)
function testSubsDifferentChunks() {
var subUrls1 = [ "foo.com/a" ];
var subUrls2 = [ "foo.com/b" ];
var subUrls1 = [ "3:foo.com/a" ];
var subUrls2 = [ "3:foo.com/b" ];
var addUrls = [ "foo.com/a", "foo.com/b", "foo.com/c" ];

View File

@ -23,6 +23,12 @@ var chunk3Urls = [
];
var chunk3 = chunk3Urls.join("\n");
var chunk3SubUrls = [
"1:test.com/a",
"1:foo.bar.com/a",
"2:blah.com/a" ];
var chunk3Sub = chunk3SubUrls.join("\n");
var chunk4Urls = [
"a.com/b",
"b.com/c",
@ -204,8 +210,8 @@ function do_subs() {
var data =
"n:1000\n" +
"i:testing-phish-simple\n" +
"s:3:" + chunk3.length + "\n" +
chunk3 + "\n" +
"s:3:32:" + chunk3Sub.length + "\n" +
chunk3Sub + "\n" +
"ad:1\n" +
"ad:4-6\n";
@ -226,18 +232,18 @@ function do_adds() {
var data =
"n:1000\n" +
"i:testing-phish-simple\n" +
"a:1:" + chunk1.length + "\n" +
"a:1:32:" + chunk1.length + "\n" +
chunk1 + "\n" +
"a:2:" + chunk2.length + "\n" +
"a:2:32:" + chunk2.length + "\n" +
chunk2 + "\n" +
"a:4:" + chunk4.length + "\n" +
"a:4:32:" + chunk4.length + "\n" +
chunk4 + "\n" +
"a:5:" + chunk5.length + "\n" +
"a:5:32:" + chunk5.length + "\n" +
chunk5 + "\n" +
"a:6:" + chunk6.length + "\n" +
"a:6:32:" + chunk6.length + "\n" +
chunk6 + "\n" +
"i:testing-malware-simple\n" +
"a:1:" + chunk2.length + "\n" +
"a:1:32:" + chunk2.length + "\n" +
chunk2 + "\n";
doSimpleUpdate(data, testAddSuccess, testFailure);

View File

@ -0,0 +1,403 @@
/**
* DummyCompleter() lets tests easily specify the results of a partial
* hash completion request.
*/
function DummyCompleter() {
this.fragments = {};
this.queries = [];
}
DummyCompleter.prototype =
{
QueryInterface: function(iid)
{
if (!iid.equals(Ci.nsISupports) &&
!iid.equals(Ci.nsIUrlClassifierHashCompleter)) {
throw Cr.NS_ERROR_NO_INTERFACE;
}
return this;
},
complete: function(partialHash, cb)
{
this.queries.push(partialHash);
var fragments = this.fragments;
// Capture |this| so the timer callback below can see the completer.
var self = this;
var doCallback = function() {
if (self.alwaysFail) {
cb.completionFinished(Cr.NS_ERROR_FAILURE);
return;
}
var results;
if (fragments[partialHash]) {
for (var i = 0; i < fragments[partialHash].length; i++) {
var chunkId = fragments[partialHash][i][0];
var hash = fragments[partialHash][i][1];
cb.completion(hash, "test-phish-simple", chunkId);
}
}
cb.completionFinished(0);
}
var timer = new Timer(0, doCallback);
},
getHash: function(fragment)
{
var converter = Cc["@mozilla.org/intl/scriptableunicodeconverter"].
createInstance(Ci.nsIScriptableUnicodeConverter);
converter.charset = "UTF-8";
var result = {};
var data = converter.convertToByteArray(fragment, result);
var ch = Cc["@mozilla.org/security/hash;1"].createInstance(Ci.nsICryptoHash);
ch.init(ch.SHA256);
ch.update(data, data.length);
var hash = ch.finish(false);
return hash.slice(0, 32);
},
addFragment: function(chunkId, fragment)
{
this.addHash(chunkId, this.getHash(fragment));
},
// This method allows the caller to generate complete hashes that share the
// 32-bit prefix of a real fragment but don't otherwise match it.
addConflict: function(chunkId, fragment)
{
var realHash = this.getHash(fragment);
var invalidHash = this.getHash("blah blah blah blah blah");
this.addHash(chunkId, realHash.slice(0, 4) + invalidHash.slice(4, 32));
},
addHash: function(chunkId, hash)
{
var partial = hash.slice(0, 4);
if (this.fragments[partial]) {
this.fragments[partial].push([chunkId, hash]);
} else {
this.fragments[partial] = [[chunkId, hash]];
}
},
compareQueries: function(fragments)
{
var expectedQueries = [];
for (var i = 0; i < fragments.length; i++) {
expectedQueries.push(this.getHash(fragments[i]).slice(0, 4));
}
expectedQueries.sort();
this.queries.sort();
for (var i = 0; i < this.queries.length; i++) {
do_check_eq(this.queries[i], expectedQueries[i]);
}
do_check_eq(this.queries.length, expectedQueries.length);
}
};
function setupCompleter(table, hits, conflicts, alwaysFail)
{
var completer = new DummyCompleter();
completer.alwaysFail = alwaysFail;
for (var i = 0; i < hits.length; i++) {
var chunkId = hits[i][0];
var fragments = hits[i][1];
for (var j = 0; j < fragments.length; j++) {
completer.addFragment(chunkId, fragments[j]);
}
}
for (var i = 0; i < conflicts.length; i++) {
var chunkId = conflicts[i][0];
var fragments = conflicts[i][1];
for (var j = 0; j < fragments.length; j++) {
completer.addConflict(chunkId, fragments[j]);
}
}
dbservice.setHashCompleter(table, completer);
return completer;
}
function installCompleter(table, fragments, conflictFragments)
{
return setupCompleter(table, fragments, conflictFragments, false);
}
function installFailingCompleter(table) {
return setupCompleter(table, [], [], true);
}
// Helper assertion for checking dummy completer queries
gAssertions.completerQueried = function(data, cb)
{
var completer = data[0];
completer.compareQueries(data[1]);
cb();
}
function doTest(updates, assertions)
{
doUpdateTest(updates, assertions, runNextTest, updateError);
}
// Test an add of partial urls to a fresh database
function testPartialAdds() {
var addUrls = [ "foo.com/a", "foo.com/b", "bar.com/c" ];
var update = buildPhishingUpdate(
[
{ "chunkNum" : 1,
"urls" : addUrls
}],
4);
var completer = installCompleter('test-phish-simple', [[1, addUrls]], []);
var assertions = {
"tableData" : "test-phish-simple;a:1",
"urlsExist" : addUrls,
"completerQueried" : [completer, addUrls]
};
doTest([update], assertions);
}
function testPartialAddsWithConflicts() {
var addUrls = [ "foo.com/a", "foo.com/b", "bar.com/c" ];
var update = buildPhishingUpdate(
[
{ "chunkNum" : 1,
"urls" : addUrls
}],
4);
// Each result will have both a real match and a conflict
var completer = installCompleter('test-phish-simple',
[[1, addUrls]],
[[1, addUrls]]);
var assertions = {
"tableData" : "test-phish-simple;a:1",
"urlsExist" : addUrls,
"completerQueried" : [completer, addUrls]
};
doTest([update], assertions);
}
function testFalsePositives() {
var addUrls = [ "foo.com/a", "foo.com/b", "bar.com/c" ];
var update = buildPhishingUpdate(
[
{ "chunkNum" : 1,
"urls" : addUrls
}],
4);
// Each result will have no matching complete hashes and a non-matching
// conflict
var completer = installCompleter('test-phish-simple', [], [[1, addUrls]]);
var assertions = {
"tableData" : "test-phish-simple;a:1",
"urlsDontExist" : addUrls,
"completerQueried" : [completer, addUrls]
};
doTest([update], assertions);
}
function testEmptyCompleter() {
var addUrls = [ "foo.com/a", "foo.com/b", "bar.com/c" ];
var update = buildPhishingUpdate(
[
{ "chunkNum" : 1,
"urls" : addUrls
}],
4);
// Completer will never return full hashes
var completer = installCompleter('test-phish-simple', [], []);
var assertions = {
"tableData" : "test-phish-simple;a:1",
"urlsDontExist" : addUrls,
"completerQueried" : [completer, addUrls]
};
doTest([update], assertions);
}
function testCompleterFailure() {
var addUrls = [ "foo.com/a", "foo.com/b", "bar.com/c" ];
var update = buildPhishingUpdate(
[
{ "chunkNum" : 1,
"urls" : addUrls
}],
4);
// Completer will fail every completion request
var completer = installFailingCompleter('test-phish-simple');
var assertions = {
"tableData" : "test-phish-simple;a:1",
"urlsDontExist" : addUrls,
"completerQueried" : [completer, addUrls]
};
doTest([update], assertions);
}
function testMixedSizesSameDomain() {
var add1Urls = [ "foo.com/a" ];
var add2Urls = [ "foo.com/b" ];
var update1 = buildPhishingUpdate(
[
{ "chunkNum" : 1,
"urls" : add1Urls }],
4);
var update2 = buildPhishingUpdate(
[
{ "chunkNum" : 2,
"urls" : add2Urls }],
32);
// We should only need to complete the partial hashes
var completer = installCompleter('test-phish-simple', [[1, add1Urls]], []);
var assertions = {
"tableData" : "test-phish-simple;a:1-2",
// both urls should match...
"urlsExist" : add1Urls.concat(add2Urls),
// ... but the completer should only be queried for the partial entry
"completerQueried" : [completer, add1Urls]
};
doTest([update1, update2], assertions);
}
function testMixedSizesDifferentDomains() {
var add1Urls = [ "foo.com/a" ];
var add2Urls = [ "bar.com/b" ];
var update1 = buildPhishingUpdate(
[
{ "chunkNum" : 1,
"urls" : add1Urls }],
4);
var update2 = buildPhishingUpdate(
[
{ "chunkNum" : 2,
"urls" : add2Urls }],
32);
// We should only need to complete the partial hashes
var completer = installCompleter('test-phish-simple', [[1, add1Urls]], []);
var assertions = {
"tableData" : "test-phish-simple;a:1-2",
// both urls should match...
"urlsExist" : add1Urls.concat(add2Urls),
// ... but the completer should only be queried for the partial entry
"completerQueried" : [completer, add1Urls]
};
doTest([update1, update2], assertions);
}
function testMixedSizesNoCompleter() {
var add1Urls = [ "foo.com/a" ];
var add2Urls = [ "foo.com/b" ];
var update1 = buildPhishingUpdate(
[
{ "chunkNum" : 1,
"urls" : add1Urls }],
4);
var update2 = buildPhishingUpdate(
[
{ "chunkNum" : 2,
"urls" : add2Urls }],
32);
var assertions = {
"tableData" : "test-phish-simple;a:1-2",
// add1Urls shouldn't work, because there is no completer.
"urlsDontExist" : add1Urls,
// but add2Urls were added as complete hashes, so they should work.
"urlsExist" : add2Urls
};
doTest([update1, update2], assertions);
}
function testInvalidHashSize()
{
var addUrls = [ "foo.com/a", "foo.com/b", "bar.com/c" ];
var update = buildPhishingUpdate(
[
{ "chunkNum" : 1,
"urls" : addUrls
}],
12); // only 4 and 32 are legal hash sizes
var completer = installCompleter('test-phish-simple', [[1, addUrls]], []);
var assertions = {
"tableData" : "",
"urlsDontExist" : addUrls
};
// The update should fail because of the invalid hash size; a successful
// update is reported as a test error.
doUpdateTest([update], assertions, updateError, runNextTest);
}
function testWrongTable()
{
var addUrls = [ "foo.com/a" ];
var update = buildPhishingUpdate(
[
{ "chunkNum" : 1,
"urls" : addUrls
}],
32);
var completer = installCompleter('test-malware-simple', // wrong table
[[1, addUrls]], []);
// The urls were added as full-length (32-byte) hashes, so lookups should
// succeed without consulting the (wrongly registered) completer.
var assertions = {
"tableData" : "test-phish-simple;a:1",
"urlsExist" : addUrls
};
doTest([update], assertions);
}
function testWrongChunk()
{
var addUrls = [ "foo.com/a" ];
var update = buildPhishingUpdate(
[
{ "chunkNum" : 1,
"urls" : addUrls
}],
32);
var completer = installCompleter('test-phish-simple',
[[2, // Wrong chunk number
addUrls]], []);
// The urls were added as full-length (32-byte) hashes, so lookups should
// succeed regardless of the chunk id the completer reports.
var assertions = {
"tableData" : "test-phish-simple;a:1",
"urlsExist" : addUrls
};
doTest([update], assertions);
}
function run_test()
{
runTests([
testPartialAdds,
testPartialAddsWithConflicts,
testFalsePositives,
testEmptyCompleter,
testCompleterFailure,
testMixedSizesSameDomain,
testMixedSizesDifferentDomains,
testMixedSizesNoCompleter,
testInvalidHashSize
]);
}
do_test_pending();

View File

@ -13,18 +13,19 @@ function testSimpleForward() {
var add3Urls = [ "bar.com/d" ];
var update = "n:1000\n";
update += "i:test-phish-simple\n";
var update1 = buildPhishingUpdate(
var update1 = buildBareUpdate(
[{ "chunkNum" : 1,
"urls" : add1Urls }]);
update += "u:data:," + encodeURIComponent(update1) + "\n";
var update2 = buildPhishingUpdate(
var update2 = buildBareUpdate(
[{ "chunkNum" : 2,
"urls" : add2Urls }]);
update += "u:data:," + encodeURIComponent(update2) + "\n";
var update3 = buildPhishingUpdate(
var update3 = buildBareUpdate(
[{ "chunkNum" : 3,
"urls" : add3Urls }]);
update += "u:data:," + encodeURIComponent(update3) + "\n";
@ -44,15 +45,16 @@ function testNestedForward() {
var add2Urls = [ "foo.com/b" ];
var update = "n:1000\n";
update += "i:test-phish-simple\n";
var update1 = buildPhishingUpdate(
var update1 = buildBareUpdate(
[{ "chunkNum" : 1,
"urls" : add1Urls }]);
update += "u:data:," + encodeURIComponent(update1) + "\n";
var update2 = buildPhishingUpdate(
var update2 = buildBareUpdate(
[{ "chunkNum" : 2 }]);
var update3 = buildPhishingUpdate(
var update3 = buildBareUpdate(
[{ "chunkNum" : 3,
"urls" : add1Urls }]);
@ -102,13 +104,48 @@ function testErrorUrlForward() {
doTest([update], assertions, true);
}
function testMultipleTables() {
var add1Urls = [ "foo.com/a", "bar.com/c" ];
var add2Urls = [ "foo.com/b" ];
var add3Urls = [ "bar.com/d" ];
var update = "n:1000\n";
update += "i:test-phish-simple\n";
var update1 = buildBareUpdate(
[{ "chunkNum" : 1,
"urls" : add1Urls }]);
update += "u:data:," + encodeURIComponent(update1) + "\n";
var update2 = buildBareUpdate(
[{ "chunkNum" : 2,
"urls" : add2Urls }]);
update += "u:data:," + encodeURIComponent(update2) + "\n";
update += "i:test-malware-simple\n";
var update3 = buildBareUpdate(
[{ "chunkNum" : 3,
"urls" : add3Urls }]);
update += "u:data:," + encodeURIComponent(update3) + "\n";
var assertions = {
"tableData" : "test-malware-simple;a:3\ntest-phish-simple;a:1-2",
"urlsExist" : add1Urls.concat(add2Urls),
"malwareUrlsExist" : add3Urls
};
doTest([update], assertions, false);
}
function run_test()
{
runTests([
testSimpleForward,
testNestedForward,
testInvalidUrlForward,
testErrorUrlForward
testErrorUrlForward,
testMultipleTables
]);
}