Resolve a bunch of checkstyle warnings.

Karsten Loesing 2016-04-30 08:24:55 +02:00
parent b41a71cd70
commit 8bd642198c
17 changed files with 564 additions and 398 deletions
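Most of the changes below follow three recurring patterns from the Google style checks: boolean and arithmetic operators move from the end of a line to the start of the continuation line (OperatorWrap), comma-separated declarations are split into one declaration per statement (MultipleVariableDeclarations), and import blocks are regrouped and sorted (CustomImportOrder). A made-up sketch of the first two patterns, not taken from any of the files below:

    public class StyleExample {
      public static void main(String[] args) {
        /* One variable per declaration; formerly:
         * int parsedFiles = 0, skippedFiles = 0; */
        int parsedFiles = 0;
        int skippedFiles = 0;
        /* The operator starts the continuation line; formerly the &&
         * sat at the end of the first line. */
        if (parsedFiles == 0
            && skippedFiles == 0) {
          System.out.println("nothing to do");
        }
      }
    }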

View File

@@ -5,7 +5,11 @@
<!--
Checkstyle configuration that checks the Google coding conventions from Google Java Style
that can be found at https://google.github.io/styleguide/javaguide.html.
that can be found at https://google.github.io/styleguide/javaguide.html with the following
modifications:
- Replaced com.google with org.torproject in import statement ordering
[CustomImportOrder].
Checkstyle is very configurable. Be sure to read the documentation at
http://checkstyle.sf.net (or in your downloaded distribution).
@@ -159,7 +163,7 @@
<module name="OverloadMethodsDeclarationOrder"/>
<module name="VariableDeclarationUsageDistance"/>
<module name="CustomImportOrder">
<property name="specialImportsRegExp" value="com.google"/>
<property name="specialImportsRegExp" value="org.torproject"/>
<property name="sortImportsInGroupAlphabetically" value="true"/>
<property name="customImportOrderRules" value="STATIC###SPECIAL_IMPORTS###THIRD_PARTY_PACKAGE###STANDARD_JAVA_PACKAGE"/>
</module>
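With org.torproject as the special import group, a conforming import block puts static imports first, then org.torproject, then other third-party packages, then java/javax, each group sorted alphabetically and separated by a blank line. A hypothetical file illustrating the resulting order (the specific imports are for illustration only):

    import static org.junit.Assert.assertEquals;

    import org.torproject.collector.main.Configuration;
    import org.torproject.collector.main.LockFile;

    import com.google.gson.Gson;
    import org.apache.commons.codec.binary.Hex;

    import java.io.File;
    import java.util.List;

    public class ImportOrderExample {
    }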

View File

@@ -1,5 +1,6 @@
/* Copyright 2010--2012 The Tor Project
/* Copyright 2010--2016 The Tor Project
* See LICENSE for licensing information */
package org.torproject.collector.bridgedescs;
import java.io.BufferedReader;
@@ -9,13 +10,17 @@ import java.util.logging.Level;
import java.util.logging.Logger;
public class BridgeDescriptorParser {
private SanitizedBridgesWriter sbw;
private Logger logger;
public BridgeDescriptorParser(SanitizedBridgesWriter sbw) {
this.sbw = sbw;
this.logger =
Logger.getLogger(BridgeDescriptorParser.class.getName());
}
public void parse(byte[] allData, String dateTime) {
try {
BufferedReader br = new BufferedReader(new StringReader(

View File

@@ -1,7 +1,13 @@
/* Copyright 2010--2012 The Tor Project
/* Copyright 2010--2016 The Tor Project
* See LICENSE for licensing information */
package org.torproject.collector.bridgedescs;
import org.apache.commons.codec.binary.Hex;
import org.apache.commons.codec.digest.DigestUtils;
import org.apache.commons.compress.archivers.tar.TarArchiveInputStream;
import org.apache.commons.compress.compressors.gzip.GzipCompressorInputStream;
import java.io.BufferedInputStream;
import java.io.BufferedReader;
import java.io.BufferedWriter;
@@ -20,11 +26,6 @@ import java.util.TreeSet;
import java.util.logging.Level;
import java.util.logging.Logger;
import org.apache.commons.codec.binary.Hex;
import org.apache.commons.codec.digest.DigestUtils;
import org.apache.commons.compress.archivers.tar.TarArchiveInputStream;
import org.apache.commons.compress.compressors.gzip.GzipCompressorInputStream;
/**
* Reads the half-hourly snapshots of bridge descriptors from Tonga.
*/
@@ -32,8 +33,8 @@ public class BridgeSnapshotReader {
public BridgeSnapshotReader(BridgeDescriptorParser bdp,
File bridgeDirectoriesDir, File statsDirectory) {
if (bdp == null || bridgeDirectoriesDir == null ||
statsDirectory == null) {
if (bdp == null || bridgeDirectoriesDir == null
|| statsDirectory == null) {
throw new IllegalArgumentException();
}
@@ -62,11 +63,15 @@ public class BridgeSnapshotReader {
}
}
logger.fine("Importing files in directory " + bridgeDirectoriesDir
+ "/...");
+ "/...");
Set<String> descriptorImportHistory = new HashSet<String>();
int parsedFiles = 0, skippedFiles = 0, parsedStatuses = 0,
parsedServerDescriptors = 0, skippedServerDescriptors = 0,
parsedExtraInfoDescriptors = 0, skippedExtraInfoDescriptors = 0;
int parsedFiles = 0;
int skippedFiles = 0;
int parsedStatuses = 0;
int parsedServerDescriptors = 0;
int skippedServerDescriptors = 0;
int parsedExtraInfoDescriptors = 0;
int skippedExtraInfoDescriptors = 0;
Stack<File> filesInInputDir = new Stack<File>();
filesInInputDir.add(bdDir);
while (!filesInInputDir.isEmpty()) {
@@ -118,9 +123,9 @@
break;
}
}
if (firstLine.startsWith("published ") ||
firstLine.startsWith("flag-thresholds ") ||
firstLine.startsWith("r ")) {
if (firstLine.startsWith("published ")
|| firstLine.startsWith("flag-thresholds ")
|| firstLine.startsWith("r ")) {
bdp.parse(allData, dateTime);
parsedStatuses++;
} else if (descriptorImportHistory.contains(fileDigest)) {
@@ -129,10 +134,11 @@
skippedFiles++;
continue;
} else {
int start = -1, sig = -1, end = -1;
String startToken =
firstLine.startsWith("router ") ?
"router " : "extra-info ";
int start = -1;
int sig = -1;
int end = -1;
String startToken = firstLine.startsWith("router ")
? "router " : "extra-info ";
String sigToken = "\nrouter-signature\n";
String endToken = "\n-----END SIGNATURE-----\n";
while (end < ascii.length()) {
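The loop that gets cut off here scans a concatenated ASCII dump of descriptors by searching for start, signature, and end tokens; the same technique reappears in CachedRelayDescriptorReader below. A self-contained sketch of the token scanning, with a hypothetical two-descriptor input:

    public class DescriptorSplitter {
      public static void main(String[] args) {
        String ascii = "router alpha\nrouter-signature\nsig\n"
            + "-----END SIGNATURE-----\n"
            + "router beta\nrouter-signature\nsig\n"
            + "-----END SIGNATURE-----\n";
        String startToken = "router ";
        String endToken = "\n-----END SIGNATURE-----\n";
        int end = 0;
        while (end < ascii.length()) {
          int start = ascii.indexOf(startToken, end);
          if (start < 0) {
            break;
          }
          end = ascii.indexOf(endToken, start);
          if (end < 0) {
            break;
          }
          end += endToken.length();
          /* One complete descriptor, from "router " to the end marker;
           * print just its first line here. */
          System.out.println(ascii.substring(start, end).split("\n")[0]);
        }
      }
    }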

View File

@@ -1,7 +1,17 @@
/* Copyright 2010--2016 The Tor Project
* See LICENSE for licensing information */
package org.torproject.collector.bridgedescs;
import org.torproject.collector.main.Configuration;
import org.torproject.collector.main.LockFile;
import org.torproject.collector.main.LoggingConfiguration;
import org.apache.commons.codec.DecoderException;
import org.apache.commons.codec.binary.Base64;
import org.apache.commons.codec.binary.Hex;
import org.apache.commons.codec.digest.DigestUtils;
import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.File;
@@ -25,14 +35,6 @@ import java.util.TreeMap;
import java.util.logging.Level;
import java.util.logging.Logger;
import org.apache.commons.codec.DecoderException;
import org.apache.commons.codec.binary.Base64;
import org.apache.commons.codec.binary.Hex;
import org.apache.commons.codec.digest.DigestUtils;
import org.torproject.collector.main.Configuration;
import org.torproject.collector.main.LockFile;
import org.torproject.collector.main.LoggingConfiguration;
/**
* Sanitizes bridge descriptors, i.e., removes all possibly sensitive
* information from them, and writes them to a local directory structure.
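As a rough sketch of the sanitizing idea this Javadoc summarizes, for the IP-address case handled by scrubIpv4Address and getSecretForMonth further down: the address is hashed together with the bridge's fingerprint and a per-month secret, so the output stays stable within a month but cannot be reversed. The exact input layout and output encoding below are assumptions for illustration, not the code's actual format:

    /* Assumed layout for illustration only; see scrubIpv4Address in the
     * diff below for the real thing. */
    import java.security.MessageDigest;

    public class ScrubSketch {
      static String scrubIpv4(byte[] address, byte[] fingerprintBytes,
          byte[] monthlySecret) throws Exception {
        MessageDigest digest = MessageDigest.getInstance("SHA-256");
        digest.update(address);
        digest.update(fingerprintBytes);
        digest.update(monthlySecret);
        byte[] hashed = digest.digest();
        /* Map the first three hash bytes into 10.0.0.0/8. */
        return "10." + (hashed[0] & 0xff) + "." + (hashed[1] & 0xff) + "."
            + (hashed[2] & 0xff);
      }
    }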
@@ -125,8 +127,8 @@ public class SanitizedBridgesWriter extends Thread {
config.getLimitBridgeDescriptorMappings();
File statsDirectory = new File("stats");
if (bridgeDirectoriesDirectory == null ||
sanitizedBridgesDirectory == null || statsDirectory == null) {
if (bridgeDirectoriesDirectory == null
|| sanitizedBridgesDirectory == null || statsDirectory == null) {
throw new IllegalArgumentException();
}
@@ -169,9 +171,9 @@ public class SanitizedBridgesWriter extends Thread {
String line;
while ((line = br.readLine()) != null) {
String[] parts = line.split(",");
if ((line.length() != ("yyyy-MM,".length() + 31 * 2) &&
line.length() != ("yyyy-MM,".length() + 50 * 2)) ||
parts.length != 2) {
if ((line.length() != ("yyyy-MM,".length() + 31 * 2)
&& line.length() != ("yyyy-MM,".length() + 50 * 2))
|| parts.length != 2) {
this.logger.warning("Invalid line in bridge-ip-secrets file "
+ "starting with '" + line.substring(0, 7) + "'! "
+ "Not calculating any IP address hashes in this "
@@ -364,8 +366,8 @@ public class SanitizedBridgesWriter extends Thread {
}
private byte[] getSecretForMonth(String month) throws IOException {
if (!this.secretsForHashingIPAddresses.containsKey(month) ||
this.secretsForHashingIPAddresses.get(month).length == 31) {
if (!this.secretsForHashingIPAddresses.containsKey(month)
|| this.secretsForHashingIPAddresses.get(month).length == 31) {
byte[] secret = new byte[50];
this.secureRandom.nextBytes(secret);
if (this.secretsForHashingIPAddresses.containsKey(month)) {
@@ -420,8 +422,8 @@ public class SanitizedBridgesWriter extends Thread {
maxNetworkStatusPublishedTime = publicationTime;
}
if (this.bridgeSanitizingCutOffTimestamp.
compareTo(publicationTime) > 0) {
if (this.bridgeSanitizingCutOffTimestamp
.compareTo(publicationTime) > 0) {
this.logger.log(!this.haveWarnedAboutInterval ? Level.WARNING
: Level.FINE, "Sanitizing and storing network status with "
+ "publication time outside our descriptor sanitizing "
@@ -476,9 +478,9 @@ public class SanitizedBridgesWriter extends Thread {
String dirPort = parts[8];
/* Determine most recent descriptor publication time. */
if (descPublicationTime.compareTo(publicationTime) <= 0 &&
(mostRecentDescPublished == null ||
descPublicationTime.compareTo(
if (descPublicationTime.compareTo(publicationTime) <= 0
&& (mostRecentDescPublished == null
|| descPublicationTime.compareTo(
mostRecentDescPublished) > 0)) {
mostRecentDescPublished = descPublicationTime;
}
@@ -515,9 +517,9 @@ public class SanitizedBridgesWriter extends Thread {
}
/* Nothing special about s, w, and p lines; just copy them. */
} else if (line.startsWith("s ") || line.equals("s") ||
line.startsWith("w ") || line.equals("w") ||
line.startsWith("p ") || line.equals("p")) {
} else if (line.startsWith("s ") || line.equals("s")
|| line.startsWith("w ") || line.equals("w")
|| line.startsWith("p ") || line.equals("p")) {
scrubbed.append(line + "\n");
/* There should be nothing else but r, w, p, and s lines in the
@@ -541,9 +543,9 @@ public class SanitizedBridgesWriter extends Thread {
SimpleDateFormat formatter = new SimpleDateFormat(
"yyyy-MM-dd HH:mm:ss");
formatter.setTimeZone(TimeZone.getTimeZone("UTC"));
if (formatter.parse(publicationTime).getTime() -
formatter.parse(mostRecentDescPublished).getTime() >
60L * 60L * 1000L) {
if (formatter.parse(publicationTime).getTime()
- formatter.parse(mostRecentDescPublished).getTime()
> 60L * 60L * 1000L) {
this.logger.warning("The most recent descriptor in the bridge "
+ "network status published at " + publicationTime + " was "
+ "published at " + mostRecentDescPublished + " which is "
@@ -609,16 +611,21 @@ public class SanitizedBridgesWriter extends Thread {
}
/* Parse descriptor to generate a sanitized version. */
String scrubbedDesc = null, published = null,
masterKeyEd25519FromIdentityEd25519 = null;
String scrubbedDesc = null;
String published = null;
String masterKeyEd25519FromIdentityEd25519 = null;
try {
BufferedReader br = new BufferedReader(new StringReader(
new String(data, "US-ASCII")));
StringBuilder scrubbed = new StringBuilder();
String line = null, hashedBridgeIdentity = null, address = null,
routerLine = null, scrubbedAddress = null,
masterKeyEd25519 = null;
List<String> orAddresses = null, scrubbedOrAddresses = null;
String line = null;
String hashedBridgeIdentity = null;
String address = null;
String routerLine = null;
String scrubbedAddress = null;
String masterKeyEd25519 = null;
List<String> orAddresses = null;
List<String> scrubbedOrAddresses = null;
boolean skipCrypto = false;
while ((line = br.readLine()) != null) {
@@ -649,8 +656,8 @@ public class SanitizedBridgesWriter extends Thread {
if (published.compareTo(maxServerDescriptorPublishedTime) > 0) {
maxServerDescriptorPublishedTime = published;
}
if (this.bridgeSanitizingCutOffTimestamp.
compareTo(published) > 0) {
if (this.bridgeSanitizingCutOffTimestamp
.compareTo(published) > 0) {
this.logger.log(!this.haveWarnedAboutInterval
? Level.WARNING : Level.FINE, "Sanitizing and storing "
+ "server descriptor with publication time outside our "
@@ -661,15 +668,15 @@ public class SanitizedBridgesWriter extends Thread {
/* Parse the fingerprint to determine the hashed bridge
* identity. */
} else if (line.startsWith("opt fingerprint ") ||
line.startsWith("fingerprint ")) {
String fingerprint = line.substring(line.startsWith("opt ") ?
"opt fingerprint".length() : "fingerprint".length()).
replaceAll(" ", "").toLowerCase();
} else if (line.startsWith("opt fingerprint ")
|| line.startsWith("fingerprint ")) {
String fingerprint = line.substring(line.startsWith("opt ")
? "opt fingerprint".length() : "fingerprint".length())
.replaceAll(" ", "").toLowerCase();
byte[] fingerprintBytes = Hex.decodeHex(
fingerprint.toCharArray());
hashedBridgeIdentity = DigestUtils.shaHex(fingerprintBytes).
toLowerCase();
hashedBridgeIdentity = DigestUtils.shaHex(fingerprintBytes)
.toLowerCase();
try {
scrubbedAddress = scrubIpv4Address(address, fingerprintBytes,
published);
@@ -695,9 +702,10 @@ public class SanitizedBridgesWriter extends Thread {
}
scrubbed.append((line.startsWith("opt ") ? "opt " : "")
+ "fingerprint");
for (int i = 0; i < hashedBridgeIdentity.length() / 4; i++)
for (int i = 0; i < hashedBridgeIdentity.length() / 4; i++) {
scrubbed.append(" " + hashedBridgeIdentity.substring(4 * i,
4 * (i + 1)).toUpperCase());
}
scrubbed.append("\n");
/* Replace the contact line (if present) with a generic one. */
@@ -722,8 +730,8 @@ public class SanitizedBridgesWriter extends Thread {
/* Replace extra-info digest with the hashed digest of the
* non-scrubbed descriptor. */
} else if (line.startsWith("opt extra-info-digest ") ||
line.startsWith("extra-info-digest ")) {
} else if (line.startsWith("opt extra-info-digest ")
|| line.startsWith("extra-info-digest ")) {
String[] parts = line.split(" ");
if (line.startsWith("opt ")) {
scrubbed.append("opt ");
@@ -733,8 +741,8 @@ public class SanitizedBridgesWriter extends Thread {
Hex.decodeHex(parts[1].toCharArray())).toUpperCase());
if (parts.length > 2) {
scrubbed.append(" " + Base64.encodeBase64String(
DigestUtils.sha256(Base64.decodeBase64(parts[2]))).
replaceAll("=", ""));
DigestUtils.sha256(Base64.decodeBase64(parts[2])))
.replaceAll("=", ""));
}
scrubbed.append("\n");
@@ -752,8 +760,8 @@ public class SanitizedBridgesWriter extends Thread {
/* Extract master-key-ed25519 from identity-ed25519. */
} else if (line.equals("identity-ed25519")) {
StringBuilder sb = new StringBuilder();
while ((line = br.readLine()) != null &&
!line.equals("-----END ED25519 CERT-----")) {
while ((line = br.readLine()) != null
&& !line.equals("-----END ED25519 CERT-----")) {
if (line.equals("-----BEGIN ED25519 CERT-----")) {
continue;
}
@@ -764,8 +772,8 @@ public class SanitizedBridgesWriter extends Thread {
sb.toString());
String sha256MasterKeyEd25519 = Base64.encodeBase64String(
DigestUtils.sha256(Base64.decodeBase64(
masterKeyEd25519FromIdentityEd25519 + "="))).
replaceAll("=", "");
masterKeyEd25519FromIdentityEd25519 + "=")))
.replaceAll("=", "");
scrubbed.append("master-key-ed25519 " + sha256MasterKeyEd25519
+ "\n");
if (masterKeyEd25519 != null && !masterKeyEd25519.equals(
@@ -778,8 +786,8 @@ public class SanitizedBridgesWriter extends Thread {
/* Verify that identity-ed25519 and master-key-ed25519 match. */
} else if (line.startsWith("master-key-ed25519 ")) {
masterKeyEd25519 = line.substring(line.indexOf(" ") + 1);
if (masterKeyEd25519FromIdentityEd25519 != null &&
!masterKeyEd25519FromIdentityEd25519.equals(
if (masterKeyEd25519FromIdentityEd25519 != null
&& !masterKeyEd25519FromIdentityEd25519.equals(
masterKeyEd25519)) {
this.logger.warning("Mismatch between identity-ed25519 and "
+ "master-key-ed25519. Skipping.");
@@ -829,9 +837,9 @@ public class SanitizedBridgesWriter extends Thread {
/* Skip all crypto parts that might leak the bridge's identity
* fingerprint. */
} else if (line.startsWith("-----BEGIN ")
|| line.equals("onion-key") || line.equals("signing-key") ||
line.equals("onion-key-crosscert") ||
line.startsWith("ntor-onion-key-crosscert ")) {
|| line.equals("onion-key") || line.equals("signing-key")
|| line.equals("onion-key-crosscert")
|| line.startsWith("ntor-onion-key-crosscert ")) {
skipCrypto = true;
/* Stop skipping lines when the crypto parts are over. */
@@ -893,8 +901,8 @@ public class SanitizedBridgesWriter extends Thread {
byte[] forDigest = new byte[sig - start];
System.arraycopy(data, start, forDigest, 0, sig - start);
descriptorDigestSha256Base64 = Base64.encodeBase64String(
DigestUtils.sha256(DigestUtils.sha256(forDigest))).
replaceAll("=", "");
DigestUtils.sha256(DigestUtils.sha256(forDigest)))
.replaceAll("=", "");
}
} catch (UnsupportedEncodingException e) {
/* Handle below. */
@@ -1010,14 +1018,16 @@ public class SanitizedBridgesWriter extends Thread {
public void sanitizeAndStoreExtraInfoDescriptor(byte[] data) {
/* Parse descriptor to generate a sanitized version. */
String scrubbedDesc = null, published = null,
masterKeyEd25519FromIdentityEd25519 = null;
String scrubbedDesc = null;
String published = null;
String masterKeyEd25519FromIdentityEd25519 = null;
try {
BufferedReader br = new BufferedReader(new StringReader(new String(
data, "US-ASCII")));
String line = null;
StringBuilder scrubbed = null;
String hashedBridgeIdentity = null, masterKeyEd25519 = null;
String hashedBridgeIdentity = null;
String masterKeyEd25519 = null;
while ((line = br.readLine()) != null) {
/* Parse bridge identity from extra-info line and replace it with
@@ -1054,8 +1064,8 @@ public class SanitizedBridgesWriter extends Thread {
/* Extract master-key-ed25519 from identity-ed25519. */
} else if (line.equals("identity-ed25519")) {
StringBuilder sb = new StringBuilder();
while ((line = br.readLine()) != null &&
!line.equals("-----END ED25519 CERT-----")) {
while ((line = br.readLine()) != null
&& !line.equals("-----END ED25519 CERT-----")) {
if (line.equals("-----BEGIN ED25519 CERT-----")) {
continue;
}
@@ -1066,8 +1076,8 @@ public class SanitizedBridgesWriter extends Thread {
sb.toString());
String sha256MasterKeyEd25519 = Base64.encodeBase64String(
DigestUtils.sha256(Base64.decodeBase64(
masterKeyEd25519FromIdentityEd25519 + "="))).
replaceAll("=", "");
masterKeyEd25519FromIdentityEd25519 + "=")))
.replaceAll("=", "");
scrubbed.append("master-key-ed25519 " + sha256MasterKeyEd25519
+ "\n");
if (masterKeyEd25519 != null && !masterKeyEd25519.equals(
@@ -1080,8 +1090,8 @@ public class SanitizedBridgesWriter extends Thread {
/* Verify that identity-ed25519 and master-key-ed25519 match. */
} else if (line.startsWith("master-key-ed25519 ")) {
masterKeyEd25519 = line.substring(line.indexOf(" ") + 1);
if (masterKeyEd25519FromIdentityEd25519 != null &&
!masterKeyEd25519FromIdentityEd25519.equals(
if (masterKeyEd25519FromIdentityEd25519 != null
&& !masterKeyEd25519FromIdentityEd25519.equals(
masterKeyEd25519)) {
this.logger.warning("Mismatch between identity-ed25519 and "
+ "master-key-ed25519. Skipping.");
@@ -1169,8 +1179,8 @@ public class SanitizedBridgesWriter extends Thread {
byte[] forDigest = new byte[sig - start];
System.arraycopy(data, start, forDigest, 0, sig - start);
descriptorDigestSha256Base64 = Base64.encodeBase64String(
DigestUtils.sha256(DigestUtils.sha256(forDigest))).
replaceAll("=", "");
DigestUtils.sha256(DigestUtils.sha256(forDigest)))
.replaceAll("=", "");
}
} catch (UnsupportedEncodingException e) {
/* Handle below. */
@@ -1230,11 +1240,12 @@ public class SanitizedBridgesWriter extends Thread {
public void finishWriting() {
/* Delete secrets that we don't need anymore. */
if (!this.secretsForHashingIPAddresses.isEmpty() &&
this.secretsForHashingIPAddresses.firstKey().compareTo(
if (!this.secretsForHashingIPAddresses.isEmpty()
&& this.secretsForHashingIPAddresses.firstKey().compareTo(
this.bridgeSanitizingCutOffTimestamp) < 0) {
try {
int kept = 0, deleted = 0;
int kept = 0;
int deleted = 0;
BufferedWriter bw = new BufferedWriter(new FileWriter(
this.bridgeIpSecretsFile));
for (Map.Entry<String, byte[]> e :
@@ -1267,26 +1278,26 @@ public class SanitizedBridgesWriter extends Thread {
try {
long maxNetworkStatusPublishedMillis =
dateTimeFormat.parse(maxNetworkStatusPublishedTime).getTime();
if (maxNetworkStatusPublishedMillis > 0L &&
maxNetworkStatusPublishedMillis < tooOldMillis) {
if (maxNetworkStatusPublishedMillis > 0L
&& maxNetworkStatusPublishedMillis < tooOldMillis) {
this.logger.warning("The last known bridge network status was "
+ "published " + maxNetworkStatusPublishedTime + ", which is "
+ "more than 5:30 hours in the past.");
}
long maxServerDescriptorPublishedMillis =
dateTimeFormat.parse(maxServerDescriptorPublishedTime).
getTime();
if (maxServerDescriptorPublishedMillis > 0L &&
maxServerDescriptorPublishedMillis < tooOldMillis) {
dateTimeFormat.parse(maxServerDescriptorPublishedTime)
.getTime();
if (maxServerDescriptorPublishedMillis > 0L
&& maxServerDescriptorPublishedMillis < tooOldMillis) {
this.logger.warning("The last known bridge server descriptor was "
+ "published " + maxServerDescriptorPublishedTime + ", which "
+ "is more than 5:30 hours in the past.");
}
long maxExtraInfoDescriptorPublishedMillis =
dateTimeFormat.parse(maxExtraInfoDescriptorPublishedTime).
getTime();
if (maxExtraInfoDescriptorPublishedMillis > 0L &&
maxExtraInfoDescriptorPublishedMillis < tooOldMillis) {
dateTimeFormat.parse(maxExtraInfoDescriptorPublishedTime)
.getTime();
if (maxExtraInfoDescriptorPublishedMillis > 0L
&& maxExtraInfoDescriptorPublishedMillis < tooOldMillis) {
this.logger.warning("The last known bridge extra-info descriptor "
+ "was published " + maxExtraInfoDescriptorPublishedTime
+ ", which is more than 5:30 hours in the past.");

View File

@@ -1,7 +1,17 @@
/* Copyright 2011--2016 The Tor Project
* See LICENSE for licensing information */
package org.torproject.collector.bridgepools;
import org.torproject.collector.main.Configuration;
import org.torproject.collector.main.LockFile;
import org.torproject.collector.main.LoggingConfiguration;
import org.apache.commons.codec.DecoderException;
import org.apache.commons.codec.binary.Hex;
import org.apache.commons.codec.digest.DigestUtils;
import org.apache.commons.compress.compressors.gzip.GzipCompressorInputStream;
import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.File;
@@ -24,14 +34,6 @@ import java.util.TreeSet;
import java.util.logging.Level;
import java.util.logging.Logger;
import org.apache.commons.codec.DecoderException;
import org.apache.commons.codec.binary.Hex;
import org.apache.commons.codec.digest.DigestUtils;
import org.apache.commons.compress.compressors.gzip.GzipCompressorInputStream;
import org.torproject.collector.main.Configuration;
import org.torproject.collector.main.LockFile;
import org.torproject.collector.main.LoggingConfiguration;
public class BridgePoolAssignmentsProcessor extends Thread {
public static void main(String[] args) {
@@ -77,8 +79,8 @@ public class BridgePoolAssignmentsProcessor extends Thread {
Logger logger =
Logger.getLogger(BridgePoolAssignmentsProcessor.class.getName());
if (assignmentsDirectory == null ||
sanitizedAssignmentsDirectory == null) {
if (assignmentsDirectory == null
|| sanitizedAssignmentsDirectory == null) {
IllegalArgumentException e = new IllegalArgumentException("Neither "
+ "assignmentsDirectory nor sanitizedAssignmentsDirectory may "
+ "be null!");
@@ -117,9 +119,11 @@ public class BridgePoolAssignmentsProcessor extends Thread {
} else {
br = new BufferedReader(new FileReader(assignmentFile));
}
String line, bridgePoolAssignmentLine = null;
String line;
String bridgePoolAssignmentLine = null;
SortedSet<String> sanitizedAssignments = new TreeSet<String>();
boolean wroteLastLine = false, skipBefore20120504125947 = true;
boolean wroteLastLine = false;
boolean skipBefore20120504125947 = true;
Set<String> hashedFingerprints = null;
while ((line = br.readLine()) != null || !wroteLastLine) {
if (line != null && line.startsWith("bridge-pool-assignment ")) {
@@ -142,8 +146,8 @@ public class BridgePoolAssignmentsProcessor extends Thread {
continue;
}
}
if (line == null ||
line.startsWith("bridge-pool-assignment ")) {
if (line == null
|| line.startsWith("bridge-pool-assignment ")) {
if (bridgePoolAssignmentLine != null) {
try {
long bridgePoolAssignmentTime = assignmentFormat.parse(
@@ -235,8 +239,8 @@ public class BridgePoolAssignmentsProcessor extends Thread {
+ "starting with '" + duplicateFingerprint + "'.");
}
if (maxBridgePoolAssignmentTime > 0L &&
maxBridgePoolAssignmentTime + 330L * 60L * 1000L
if (maxBridgePoolAssignmentTime > 0L
&& maxBridgePoolAssignmentTime + 330L * 60L * 1000L
< System.currentTimeMillis()) {
SimpleDateFormat dateTimeFormat = new SimpleDateFormat(
"yyyy-MM-dd HH:mm:ss");

View File

@@ -1,7 +1,17 @@
/* Copyright 2010--2016 The Tor Project
* See LICENSE for licensing information */
package org.torproject.collector.exitlists;
import org.torproject.collector.main.Configuration;
import org.torproject.collector.main.LockFile;
import org.torproject.collector.main.LoggingConfiguration;
import org.torproject.descriptor.Descriptor;
import org.torproject.descriptor.DescriptorParseException;
import org.torproject.descriptor.DescriptorParser;
import org.torproject.descriptor.DescriptorSourceFactory;
import org.torproject.descriptor.ExitList;
import java.io.BufferedInputStream;
import java.io.BufferedWriter;
import java.io.File;
@@ -20,15 +30,6 @@ import java.util.TreeSet;
import java.util.logging.Level;
import java.util.logging.Logger;
import org.torproject.descriptor.Descriptor;
import org.torproject.descriptor.DescriptorParser;
import org.torproject.descriptor.DescriptorSourceFactory;
import org.torproject.descriptor.ExitList;
import org.torproject.descriptor.DescriptorParseException;
import org.torproject.collector.main.Configuration;
import org.torproject.collector.main.LockFile;
import org.torproject.collector.main.LoggingConfiguration;
public class ExitListDownloader extends Thread {
public static void main(String[] args) {
@@ -85,8 +86,8 @@ public class ExitListDownloader extends Thread {
huc.connect();
int response = huc.getResponseCode();
if (response != 200) {
logger.warning("Could not download exit list. Response code " +
response);
logger.warning("Could not download exit list. Response code "
+ response);
return;
}
BufferedInputStream in = new BufferedInputStream(
@@ -121,8 +122,8 @@ public class ExitListDownloader extends Thread {
List<Descriptor> parsedDescriptors =
descriptorParser.parseDescriptors(downloadedExitList.getBytes(),
tarballFile.getName());
if (parsedDescriptors.size() != 1 ||
!(parsedDescriptors.get(0) instanceof ExitList)) {
if (parsedDescriptors.size() != 1
|| !(parsedDescriptors.get(0) instanceof ExitList)) {
logger.warning("Could not parse downloaded exit list");
return;
}
@@ -136,12 +137,12 @@ public class ExitListDownloader extends Thread {
logger.log(Level.WARNING, "Could not parse downloaded exit list",
e);
}
if (maxScanMillis > 0L &&
maxScanMillis + 330L * 60L * 1000L < System.currentTimeMillis()) {
if (maxScanMillis > 0L
&& maxScanMillis + 330L * 60L * 1000L < System.currentTimeMillis()) {
logger.warning("The last reported scan in the downloaded exit list "
+ "took place at " + dateTimeFormat.format(maxScanMillis)
+ ", which is more than 5:30 hours in the past.");
}
}
/* Write to disk. */
File rsyncFile = new File("recent/exit-lists/"

View File

@@ -1,7 +1,14 @@
/* Copyright 2015 The Tor Project
/* Copyright 2015--2016 The Tor Project
* See LICENSE for licensing information */
package org.torproject.collector.index;
import com.google.gson.Gson;
import com.google.gson.GsonBuilder;
import org.apache.commons.compress.compressors.bzip2.BZip2CompressorOutputStream;
import org.apache.commons.compress.compressors.xz.XZCompressorOutputStream;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileOutputStream;
@@ -17,12 +24,6 @@ import java.util.TimeZone;
import java.util.TreeSet;
import java.util.zip.GZIPOutputStream;
import org.apache.commons.compress.compressors.bzip2.BZip2CompressorOutputStream;
import org.apache.commons.compress.compressors.xz.XZCompressorOutputStream;
import com.google.gson.Gson;
import com.google.gson.GsonBuilder;
/* Create a fresh index.json containing all directories and files in the
* archive/ and recent/ directories.
*
@@ -53,12 +54,14 @@ public class CreateIndexJson {
String path;
SortedSet<FileNode> files;
SortedSet<DirectoryNode> directories;
DirectoryNode(String path, SortedSet<FileNode> files,
SortedSet<DirectoryNode> directories) {
this.path = path;
this.files = files;
this.directories = directories;
}
public int compareTo(DirectoryNode o) {
return this.path.compareTo(o.path);
}
@@ -69,6 +72,7 @@ public class CreateIndexJson {
String path;
SortedSet<FileNode> files;
SortedSet<DirectoryNode> directories;
IndexNode(String index_created, String path,
SortedSet<FileNode> files,
SortedSet<DirectoryNode> directories) {
@@ -83,17 +87,20 @@ public class CreateIndexJson {
String path;
long size;
String last_modified;
FileNode(String path, long size, String last_modified) {
this.path = path;
this.size = size;
this.last_modified = last_modified;
}
public int compareTo(FileNode o) {
return this.path.compareTo(o.path);
}
}
static DateFormat dateTimeFormat;
static {
dateTimeFormat = new SimpleDateFormat(dateTimePattern,
dateTimeLocale);
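The DirectoryNode, IndexNode, and FileNode classes above map straight onto the JSON written to index.json via the Gson imports at the top of this file. A minimal sketch of that serialization, with a FileNode mirrored from the diff and made-up sample values:

    /* Illustrative only: serializing a minimal file node the way the
     * node classes above would serialize. */
    import com.google.gson.Gson;
    import com.google.gson.GsonBuilder;

    public class IndexJsonSketch {
      static class FileNode {
        String path;
        long size;
        String last_modified;

        FileNode(String path, long size, String last_modified) {
          this.path = path;
          this.size = size;
          this.last_modified = last_modified;
        }
      }

      public static void main(String[] args) {
        Gson gson = new GsonBuilder().create();
        FileNode node = new FileNode("exit-list-2016-04-30", 123456L,
            "2016-04-30 08:24");
        /* Prints something like:
         * {"path":"exit-list-2016-04-30","size":123456,
         *  "last_modified":"2016-04-30 08:24"} */
        System.out.println(gson.toJson(node));
      }
    }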

View File

@@ -1,5 +1,6 @@
/* Copyright 2010--2012 The Tor Project
/* Copyright 2010--2016 The Tor Project
* See LICENSE for licensing information */
package org.torproject.collector.main;
import java.io.BufferedReader;
@@ -67,6 +68,7 @@ public class Configuration {
private String torperfOutputDirectory = "out/torperf/";
private SortedMap<String, String> torperfSources = null;
private List<String> torperfFiles = null;
public Configuration() {
/* Initialize logger. */
@@ -219,84 +221,111 @@ public class Configuration {
System.exit(1);
}
}
public String getDirectoryArchivesOutputDirectory() {
return this.directoryArchivesOutputDirectory;
}
public boolean getImportCachedRelayDescriptors() {
return this.importCachedRelayDescriptors;
}
public List<String> getCachedRelayDescriptorDirectory() {
return this.cachedRelayDescriptorsDirectory;
}
public boolean getImportDirectoryArchives() {
return this.importDirectoryArchives;
}
public String getDirectoryArchivesDirectory() {
return this.directoryArchivesDirectory;
}
public boolean getKeepDirectoryArchiveImportHistory() {
return this.keepDirectoryArchiveImportHistory;
}
public boolean getReplaceIPAddressesWithHashes() {
return this.replaceIPAddressesWithHashes;
}
public long getLimitBridgeDescriptorMappings() {
return this.limitBridgeDescriptorMappings;
}
public String getSanitizedBridgesWriteDirectory() {
return this.sanitizedBridgesWriteDirectory;
}
public String getBridgeSnapshotsDirectory() {
return this.bridgeSnapshotsDirectory;
}
public boolean getDownloadRelayDescriptors() {
return this.downloadRelayDescriptors;
}
public List<String> getDownloadFromDirectoryAuthorities() {
return this.downloadFromDirectoryAuthorities;
}
public List<String> getDownloadVotesByFingerprint() {
return this.downloadVotesByFingerprint;
}
public boolean getDownloadCurrentConsensus() {
return this.downloadCurrentConsensus;
}
public boolean getDownloadCurrentMicrodescConsensus() {
return this.downloadCurrentMicrodescConsensus;
}
public boolean getDownloadCurrentVotes() {
return this.downloadCurrentVotes;
}
public boolean getDownloadMissingServerDescriptors() {
return this.downloadMissingServerDescriptors;
}
public boolean getDownloadMissingExtraInfoDescriptors() {
return this.downloadMissingExtraInfoDescriptors;
}
public boolean getDownloadMissingMicrodescriptors() {
return this.downloadMissingMicrodescriptors;
}
public boolean getDownloadAllServerDescriptors() {
return this.downloadAllServerDescriptors;
}
public boolean getDownloadAllExtraInfoDescriptors() {
return this.downloadAllExtraInfoDescriptors;
}
public boolean getCompressRelayDescriptorDownloads() {
return this.compressRelayDescriptorDownloads;
}
public String getAssignmentsDirectory() {
return this.assignmentsDirectory;
}
public String getSanitizedAssignmentsDirectory() {
return this.sanitizedAssignmentsDirectory;
}
public String getTorperfOutputDirectory() {
return this.torperfOutputDirectory;
}
public SortedMap<String, String> getTorperfSources() {
return this.torperfSources;
}
public List<String> getTorperfFiles() {
return this.torperfFiles;
}

View File

@@ -1,5 +1,6 @@
/* Copyright 2010--2012 The Tor Project
/* Copyright 2010--2016 The Tor Project
* See LICENSE for licensing information */
package org.torproject.collector.main;
import java.io.BufferedReader;

View File

@@ -1,5 +1,6 @@
/* Copyright 2010--2016 The Tor Project
* See LICENSE for licensing information */
package org.torproject.collector.main;
import java.io.File;

View File

@@ -1,7 +1,12 @@
/* Copyright 2010--2014 The Tor Project
/* Copyright 2010--2016 The Tor Project
* See LICENSE for licensing information */
package org.torproject.collector.relaydescs;
import org.apache.commons.codec.binary.Base64;
import org.apache.commons.codec.digest.DigestUtils;
import org.apache.commons.compress.compressors.bzip2.BZip2CompressorInputStream;
import java.io.BufferedInputStream;
import java.io.BufferedReader;
import java.io.BufferedWriter;
@@ -28,10 +33,6 @@ import java.util.TreeSet;
import java.util.logging.Level;
import java.util.logging.Logger;
import org.apache.commons.codec.binary.Base64;
import org.apache.commons.codec.digest.DigestUtils;
import org.apache.commons.compress.compressors.bzip2.BZip2CompressorInputStream;
/**
* Read in all files in a given directory and pass buffered readers of
* them to the relay descriptor parser.
@@ -40,13 +41,14 @@ public class ArchiveReader {
public ArchiveReader(RelayDescriptorParser rdp, File archivesDirectory,
File statsDirectory, boolean keepImportHistory) {
if (rdp == null || archivesDirectory == null ||
statsDirectory == null) {
if (rdp == null || archivesDirectory == null
|| statsDirectory == null) {
throw new IllegalArgumentException();
}
rdp.setArchiveReader(this);
int parsedFiles = 0, ignoredFiles = 0;
int parsedFiles = 0;
int ignoredFiles = 0;
Logger logger = Logger.getLogger(ArchiveReader.class.getName());
SortedSet<String> archivesImportHistory = new TreeSet<String>();
File archivesImportHistoryFile = new File(statsDirectory,
@@ -82,8 +84,8 @@ public class ArchiveReader {
if (rdp != null) {
try {
BufferedInputStream bis = null;
if (keepImportHistory &&
archivesImportHistory.contains(pop.getName())) {
if (keepImportHistory
&& archivesImportHistory.contains(pop.getName())) {
ignoredFiles++;
continue;
} else if (pop.getName().endsWith(".tar.bz2")) {
@@ -176,7 +178,8 @@ public class ArchiveReader {
} catch (UnsupportedEncodingException e) {
/* No way that US-ASCII is not supported. */
}
int start = -1, end = -1;
int start = -1;
int end = -1;
String startToken = "onion-key\n";
while (end < ascii.length()) {
start = ascii.indexOf(startToken, end);
@@ -198,8 +201,8 @@ public class ArchiveReader {
if (!this.microdescriptorValidAfterTimes.containsKey(
digest256Hex)) {
logger.fine("Could not store microdescriptor '"
+ digest256Hex + "', which was not contained in a "
+ "microdesc consensus.");
+ digest256Hex + "', which was not contained in a "
+ "microdesc consensus.");
continue;
}
for (String validAfterTime :
@@ -265,6 +268,7 @@ public class ArchiveReader {
private Map<String, Set<String>> microdescriptorValidAfterTimes =
new HashMap<String, Set<String>>();
public void haveParsedMicrodescConsensus(String validAfterTime,
SortedSet<String> microdescriptorDigests) {
for (String microdescriptor : microdescriptorDigests) {

View File

@@ -1,7 +1,15 @@
/* Copyright 2010--2016 The Tor Project
* See LICENSE for licensing information */
package org.torproject.collector.relaydescs;
import org.torproject.collector.main.Configuration;
import org.torproject.collector.main.LockFile;
import org.torproject.collector.main.LoggingConfiguration;
import org.torproject.descriptor.DescriptorParseException;
import org.torproject.descriptor.DescriptorParser;
import org.torproject.descriptor.DescriptorSourceFactory;
import java.io.BufferedOutputStream;
import java.io.BufferedReader;
import java.io.BufferedWriter;
@@ -27,13 +35,6 @@ import java.util.TreeMap;
import java.util.logging.Level;
import java.util.logging.Logger;
import org.torproject.descriptor.DescriptorParser;
import org.torproject.descriptor.DescriptorSourceFactory;
import org.torproject.descriptor.DescriptorParseException;
import org.torproject.collector.main.Configuration;
import org.torproject.collector.main.LockFile;
import org.torproject.collector.main.LoggingConfiguration;
public class ArchiveWriter extends Thread {
public static void main(String[] args) {
@@ -78,11 +79,13 @@ public class ArchiveWriter extends Thread {
private File outputDirectory;
private String rsyncCatString;
private DescriptorParser descriptorParser;
private int storedConsensusesCounter = 0,
storedMicrodescConsensusesCounter = 0, storedVotesCounter = 0,
storedCertsCounter = 0, storedServerDescriptorsCounter = 0,
storedExtraInfoDescriptorsCounter = 0,
storedMicrodescriptorsCounter = 0;
private int storedConsensusesCounter = 0;
private int storedMicrodescConsensusesCounter = 0;
private int storedVotesCounter = 0;
private int storedCertsCounter = 0;
private int storedServerDescriptorsCounter = 0;
private int storedExtraInfoDescriptorsCounter = 0;
private int storedMicrodescriptorsCounter = 0;
private SortedMap<Long, SortedSet<String>> storedConsensuses =
new TreeMap<Long, SortedSet<String>>();
@@ -361,6 +364,7 @@ public class ArchiveWriter extends Thread {
private static final byte[] CONSENSUS_ANNOTATION =
"@type network-status-consensus-3 1.0\n".getBytes();
public void storeConsensus(byte[] data, long validAfter,
SortedSet<String> dirSources,
SortedSet<String> serverDescriptorDigests) {
@@ -376,8 +380,8 @@ public class ArchiveWriter extends Thread {
if (this.store(CONSENSUS_ANNOTATION, data, outputFiles, null)) {
this.storedConsensusesCounter++;
}
if (!tarballFileExistedBefore &&
this.now - validAfter < 3L * 60L * 60L * 1000L) {
if (!tarballFileExistedBefore
&& this.now - validAfter < 3L * 60L * 60L * 1000L) {
this.storedConsensuses.put(validAfter, serverDescriptorDigests);
this.expectedVotes.put(validAfter, dirSources.size());
}
@@ -385,6 +389,7 @@ public class ArchiveWriter extends Thread {
private static final byte[] MICRODESCCONSENSUS_ANNOTATION =
"@type network-status-microdesc-consensus-3 1.0\n".getBytes();
public void storeMicrodescConsensus(byte[] data, long validAfter,
SortedSet<String> microdescriptorDigests) {
SimpleDateFormat yearMonthDirectoryFormat = new SimpleDateFormat(
@@ -406,8 +411,8 @@ public class ArchiveWriter extends Thread {
null)) {
this.storedMicrodescConsensusesCounter++;
}
if (!tarballFileExistedBefore &&
this.now - validAfter < 3L * 60L * 60L * 1000L) {
if (!tarballFileExistedBefore
&& this.now - validAfter < 3L * 60L * 60L * 1000L) {
this.storedMicrodescConsensuses.put(validAfter,
microdescriptorDigests);
}
@@ -415,6 +420,7 @@ public class ArchiveWriter extends Thread {
private static final byte[] VOTE_ANNOTATION =
"@type network-status-vote-3 1.0\n".getBytes();
public void storeVote(byte[] data, long validAfter,
String fingerprint, String digest,
SortedSet<String> serverDescriptorDigests) {
@@ -431,8 +437,8 @@ public class ArchiveWriter extends Thread {
if (this.store(VOTE_ANNOTATION, data, outputFiles, null)) {
this.storedVotesCounter++;
}
if (!tarballFileExistedBefore &&
this.now - validAfter < 3L * 60L * 60L * 1000L) {
if (!tarballFileExistedBefore
&& this.now - validAfter < 3L * 60L * 60L * 1000L) {
if (!this.storedVotes.containsKey(validAfter)) {
this.storedVotes.put(validAfter,
new TreeMap<String, SortedSet<String>>());
@@ -444,6 +450,7 @@ public class ArchiveWriter extends Thread {
private static final byte[] CERTIFICATE_ANNOTATION =
"@type dir-key-certificate-3 1.0\n".getBytes();
public void storeCertificate(byte[] data, String fingerprint,
long published) {
SimpleDateFormat printFormat = new SimpleDateFormat(
@@ -459,6 +466,7 @@ public class ArchiveWriter extends Thread {
private static final byte[] SERVER_DESCRIPTOR_ANNOTATION =
"@type server-descriptor 1.0\n".getBytes();
public void storeServerDescriptor(byte[] data, String digest,
long published, String extraInfoDigest) {
SimpleDateFormat printFormat = new SimpleDateFormat("yyyy/MM/");
@@ -477,8 +485,8 @@ public class ArchiveWriter extends Thread {
append)) {
this.storedServerDescriptorsCounter++;
}
if (!tarballFileExistedBefore &&
this.now - published < 48L * 60L * 60L * 1000L) {
if (!tarballFileExistedBefore
&& this.now - published < 48L * 60L * 60L * 1000L) {
if (!this.storedServerDescriptors.containsKey(published)) {
this.storedServerDescriptors.put(published,
new HashMap<String, String>());
@@ -490,6 +498,7 @@ public class ArchiveWriter extends Thread {
private static final byte[] EXTRA_INFO_ANNOTATION =
"@type extra-info 1.0\n".getBytes();
public void storeExtraInfoDescriptor(byte[] data,
String extraInfoDigest, long published) {
SimpleDateFormat descriptorFormat = new SimpleDateFormat("yyyy/MM/");
@@ -507,8 +516,8 @@ public class ArchiveWriter extends Thread {
if (this.store(EXTRA_INFO_ANNOTATION, data, outputFiles, append)) {
this.storedExtraInfoDescriptorsCounter++;
}
if (!tarballFileExistedBefore &&
this.now - published < 48L * 60L * 60L * 1000L) {
if (!tarballFileExistedBefore
&& this.now - published < 48L * 60L * 60L * 1000L) {
if (!this.storedExtraInfoDescriptors.containsKey(published)) {
this.storedExtraInfoDescriptors.put(published,
new HashSet<String>());
@@ -519,6 +528,7 @@ public class ArchiveWriter extends Thread {
private static final byte[] MICRODESCRIPTOR_ANNOTATION =
"@type microdescriptor 1.0\n".getBytes();
public void storeMicrodescriptor(byte[] data,
String microdescriptorDigest, long validAfter) {
/* TODO We could check here whether we already stored the
@@ -545,8 +555,8 @@ public class ArchiveWriter extends Thread {
append)) {
this.storedMicrodescriptorsCounter++;
}
if (!tarballFileExistedBefore &&
this.now - validAfter < 40L * 24L * 60L * 60L * 1000L) {
if (!tarballFileExistedBefore
&& this.now - validAfter < 40L * 24L * 60L * 60L * 1000L) {
if (!this.storedMicrodescriptors.containsKey(validAfter)) {
this.storedMicrodescriptors.put(validAfter,
new HashSet<String>());
@@ -557,6 +567,7 @@ public class ArchiveWriter extends Thread {
}
private StringBuilder intermediateStats = new StringBuilder();
public void intermediateStats(String event) {
intermediateStats.append("While " + event + ", we stored "
+ this.storedConsensusesCounter + " consensus(es), "
@@ -600,8 +611,9 @@ public class ArchiveWriter extends Thread {
for (Set<String> descriptors : this.storedMicrodescriptors.values()) {
knownMicrodescriptors.addAll(descriptors);
}
boolean missingDescriptors = false, missingVotes = false,
missingMicrodescConsensus = false;
boolean missingDescriptors = false;
boolean missingVotes = false;
boolean missingMicrodescConsensus = false;
for (Map.Entry<Long, SortedSet<String>> c :
this.storedConsensuses.entrySet()) {
long validAfterMillis = c.getKey();
@@ -613,8 +625,10 @@ public class ArchiveWriter extends Thread {
foundVotes = this.storedVotes.get(validAfterMillis).size();
for (Map.Entry<String, SortedSet<String>> v :
this.storedVotes.get(validAfterMillis).entrySet()) {
int voteFoundServerDescs = 0, voteAllServerDescs = 0,
voteFoundExtraInfos = 0, voteAllExtraInfos = 0;
int voteFoundServerDescs = 0;
int voteAllServerDescs = 0;
int voteFoundExtraInfos = 0;
int voteAllExtraInfos = 0;
for (String serverDescriptorDigest : v.getValue()) {
voteAllServerDescs++;
if (knownServerDescriptors.containsKey(
@@ -636,32 +650,35 @@ public class ArchiveWriter extends Thread {
if (voteAllServerDescs > 0) {
sb.append(String.format(", %d/%d S (%.1f%%)",
voteFoundServerDescs, voteAllServerDescs,
100.0D * (double) voteFoundServerDescs /
(double) voteAllServerDescs));
100.0D * (double) voteFoundServerDescs
/ (double) voteAllServerDescs));
} else {
sb.append(", 0/0 S");
}
if (voteAllExtraInfos > 0) {
sb.append(String.format(", %d/%d E (%.1f%%)",
voteFoundExtraInfos, voteAllExtraInfos,
100.0D * (double) voteFoundExtraInfos /
(double) voteAllExtraInfos));
100.0D * (double) voteFoundExtraInfos
/ (double) voteAllExtraInfos));
} else {
sb.append(", 0/0 E");
}
String fingerprint = v.getKey();
/* Ignore turtles when warning about missing descriptors. */
if (!fingerprint.equalsIgnoreCase(
"27B6B5996C426270A5C95488AA5BCEB6BCC86956") &&
(voteFoundServerDescs * 1000 < voteAllServerDescs * 995 ||
voteFoundExtraInfos * 1000 < voteAllExtraInfos * 995)) {
"27B6B5996C426270A5C95488AA5BCEB6BCC86956")
&& (voteFoundServerDescs * 1000 < voteAllServerDescs * 995
|| voteFoundExtraInfos * 1000 < voteAllExtraInfos * 995)) {
missingDescriptors = true;
}
}
}
int foundServerDescs = 0, allServerDescs = 0, foundExtraInfos = 0,
allExtraInfos = 0, foundMicrodescriptors = 0,
allMicrodescriptors = 0;
int foundServerDescs = 0;
int allServerDescs = 0;
int foundExtraInfos = 0;
int allExtraInfos = 0;
int foundMicrodescriptors = 0;
int allMicrodescriptors = 0;
for (String serverDescriptorDigest : c.getValue()) {
allServerDescs++;
if (knownServerDescriptors.containsKey(
@@ -688,15 +705,15 @@ public class ArchiveWriter extends Thread {
}
if (allServerDescs > 0) {
sb.append(String.format(", %d/%d S (%.1f%%)", foundServerDescs,
allServerDescs, 100.0D * (double) foundServerDescs /
(double) allServerDescs));
allServerDescs, 100.0D * (double) foundServerDescs
/ (double) allServerDescs));
} else {
sb.append(", 0/0 S");
}
if (allExtraInfos > 0) {
sb.append(String.format(", %d/%d E (%.1f%%)", foundExtraInfos,
allExtraInfos, 100.0D * (double) foundExtraInfos /
(double) allExtraInfos));
allExtraInfos, 100.0D * (double) foundExtraInfos
/ (double) allExtraInfos));
} else {
sb.append(", 0/0 E");
}
@@ -712,17 +729,17 @@ public class ArchiveWriter extends Thread {
if (allMicrodescriptors > 0) {
sb.append(String.format(", %d/%d M (%.1f%%)",
foundMicrodescriptors, allMicrodescriptors,
100.0D * (double) foundMicrodescriptors /
(double) allMicrodescriptors));
100.0D * (double) foundMicrodescriptors
/ (double) allMicrodescriptors));
} else {
sb.append(", 0/0 M");
}
} else {
missingMicrodescConsensus = true;
}
if (foundServerDescs * 1000 < allServerDescs * 995 ||
foundExtraInfos * 1000 < allExtraInfos * 995 ||
foundMicrodescriptors * 1000 < allMicrodescriptors * 995) {
if (foundServerDescs * 1000 < allServerDescs * 995
|| foundExtraInfos * 1000 < allExtraInfos * 995
|| foundMicrodescriptors * 1000 < allMicrodescriptors * 995) {
missingDescriptors = true;
}
if (foundVotes < allVotes) {
@@ -756,44 +773,44 @@ public class ArchiveWriter extends Thread {
"yyyy-MM-dd HH:mm:ss");
dateTimeFormat.setTimeZone(TimeZone.getTimeZone("UTC"));
long tooOldMillis = this.now - 330L * 60L * 1000L;
if (!this.storedConsensuses.isEmpty() &&
this.storedConsensuses.lastKey() < tooOldMillis) {
if (!this.storedConsensuses.isEmpty()
&& this.storedConsensuses.lastKey() < tooOldMillis) {
this.logger.warning("The last known relay network status "
+ "consensus was valid after "
+ dateTimeFormat.format(this.storedConsensuses.lastKey())
+ ", which is more than 5:30 hours in the past.");
}
if (!this.storedMicrodescConsensuses.isEmpty() &&
this.storedMicrodescConsensuses.lastKey() < tooOldMillis) {
if (!this.storedMicrodescConsensuses.isEmpty()
&& this.storedMicrodescConsensuses.lastKey() < tooOldMillis) {
this.logger.warning("The last known relay network status "
+ "microdesc consensus was valid after "
+ dateTimeFormat.format(
this.storedMicrodescConsensuses.lastKey())
+ ", which is more than 5:30 hours in the past.");
}
if (!this.storedVotes.isEmpty() &&
this.storedVotes.lastKey() < tooOldMillis) {
if (!this.storedVotes.isEmpty()
&& this.storedVotes.lastKey() < tooOldMillis) {
this.logger.warning("The last known relay network status vote "
+ "was valid after " + dateTimeFormat.format(
this.storedVotes.lastKey()) + ", which is more than 5:30 hours "
+ "in the past.");
}
if (!this.storedServerDescriptors.isEmpty() &&
this.storedServerDescriptors.lastKey() < tooOldMillis) {
if (!this.storedServerDescriptors.isEmpty()
&& this.storedServerDescriptors.lastKey() < tooOldMillis) {
this.logger.warning("The last known relay server descriptor was "
+ "published at "
+ dateTimeFormat.format(this.storedServerDescriptors.lastKey())
+ ", which is more than 5:30 hours in the past.");
}
if (!this.storedExtraInfoDescriptors.isEmpty() &&
this.storedExtraInfoDescriptors.lastKey() < tooOldMillis) {
if (!this.storedExtraInfoDescriptors.isEmpty()
&& this.storedExtraInfoDescriptors.lastKey() < tooOldMillis) {
this.logger.warning("The last known relay extra-info descriptor "
+ "was published at " + dateTimeFormat.format(
this.storedExtraInfoDescriptors.lastKey())
+ ", which is more than 5:30 hours in the past.");
}
if (!this.storedMicrodescriptors.isEmpty() &&
this.storedMicrodescriptors.lastKey() < tooOldMillis) {
if (!this.storedMicrodescriptors.isEmpty()
&& this.storedMicrodescriptors.lastKey() < tooOldMillis) {
this.logger.warning("The last known relay microdescriptor was "
+ "contained in a microdesc consensus that was valid after "
+ dateTimeFormat.format(this.storedMicrodescriptors.lastKey())
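A note on the repeated comparisons in this file, such as foundServerDescs * 1000 < allServerDescs * 995: they test a 99.5 percent completeness threshold in pure integer arithmetic. Rearranging found / all < 0.995 to found * 1000 < all * 995 avoids floating-point division entirely. A tiny check:

    /* found/all < 0.995, rearranged to stay in integer arithmetic. */
    public class ThresholdSketch {
      public static void main(String[] args) {
        int found = 994;
        int all = 1000;
        boolean missing = found * 1000 < all * 995;
        System.out.println(missing);  // true: 994 of 1000 is below 99.5%
      }
    }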

View File

@@ -1,7 +1,11 @@
/* Copyright 2010--2012 The Tor Project
/* Copyright 2010--2016 The Tor Project
* See LICENSE for licensing information */
package org.torproject.collector.relaydescs;
import org.apache.commons.codec.binary.Hex;
import org.apache.commons.codec.digest.DigestUtils;
import java.io.BufferedInputStream;
import java.io.BufferedReader;
import java.io.BufferedWriter;
@@ -25,9 +29,6 @@ import java.util.TreeSet;
import java.util.logging.Level;
import java.util.logging.Logger;
import org.apache.commons.codec.binary.Hex;
import org.apache.commons.codec.digest.DigestUtils;
/**
* Parses all descriptors in local directory cacheddesc/ and sorts them
* into directory structure in directory-archive/.
@@ -36,8 +37,8 @@ public class CachedRelayDescriptorReader {
public CachedRelayDescriptorReader(RelayDescriptorParser rdp,
List<String> inputDirectories, File statsDirectory) {
if (rdp == null || inputDirectories == null ||
inputDirectories.isEmpty() || statsDirectory == null) {
if (rdp == null || inputDirectories == null
|| inputDirectories.isEmpty() || statsDirectory == null) {
throw new IllegalArgumentException();
}
@@ -48,8 +49,8 @@ public class CachedRelayDescriptorReader {
/* Read import history containing SHA-1 digests of previously parsed
* statuses and descriptors, so that we can skip them in this run. */
Set<String> lastImportHistory = new HashSet<String>(),
currentImportHistory = new HashSet<String>();
Set<String> lastImportHistory = new HashSet<String>();
Set<String> currentImportHistory = new HashSet<String>();
File importHistoryFile = new File(statsDirectory,
"cacheddesc-import-history");
if (importHistoryFile.exists()) {
@@ -114,8 +115,8 @@ public class CachedRelayDescriptorReader {
SimpleDateFormat dateTimeFormat =
new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
dateTimeFormat.setTimeZone(TimeZone.getTimeZone("UTC"));
if (dateTimeFormat.parse(line.substring("valid-after ".
length())).getTime() < System.currentTimeMillis()
if (dateTimeFormat.parse(line.substring("valid-after "
.length())).getTime() < System.currentTimeMillis()
- 6L * 60L * 60L * 1000L) {
logger.warning("Cached descriptor files in "
+ cachedDescDir.getAbsolutePath() + " are stale. "
@@ -133,8 +134,8 @@ public class CachedRelayDescriptorReader {
if (rdp != null) {
String digest = Hex.encodeHexString(DigestUtils.sha(
allData));
if (!lastImportHistory.contains(digest) &&
!currentImportHistory.contains(digest)) {
if (!lastImportHistory.contains(digest)
&& !currentImportHistory.contains(digest)) {
rdp.parse(allData);
} else {
dumpStats.append(" (skipped)");
@@ -142,7 +143,8 @@ public class CachedRelayDescriptorReader {
currentImportHistory.add(digest);
}
} else if (f.getName().equals("v3-status-votes")) {
int parsedNum = 0, skippedNum = 0;
int parsedNum = 0;
int skippedNum = 0;
String ascii = new String(allData, "US-ASCII");
String startToken = "network-status-version ";
int end = ascii.length();
@@ -159,8 +161,8 @@ public class CachedRelayDescriptorReader {
if (rdp != null) {
String digest = Hex.encodeHexString(DigestUtils.sha(
rawNetworkStatusBytes));
if (!lastImportHistory.contains(digest) &&
!currentImportHistory.contains(digest)) {
if (!lastImportHistory.contains(digest)
&& !currentImportHistory.contains(digest)) {
rdp.parse(rawNetworkStatusBytes);
parsedNum++;
} else {
@@ -173,16 +175,19 @@ public class CachedRelayDescriptorReader {
}
dumpStats.append("\n" + f.getName() + ": parsed " + parsedNum
+ ", skipped " + skippedNum + " votes");
} else if (f.getName().startsWith("cached-descriptors") ||
f.getName().startsWith("cached-extrainfo")) {
} else if (f.getName().startsWith("cached-descriptors")
|| f.getName().startsWith("cached-extrainfo")) {
String ascii = new String(allData, "US-ASCII");
int start = -1, sig = -1, end = -1;
int start = -1;
int sig = -1;
int end = -1;
String startToken =
f.getName().startsWith("cached-descriptors") ?
"router " : "extra-info ";
f.getName().startsWith("cached-descriptors")
? "router " : "extra-info ";
String sigToken = "\nrouter-signature\n";
String endToken = "\n-----END SIGNATURE-----\n";
int parsedNum = 0, skippedNum = 0;
int parsedNum = 0;
int skippedNum = 0;
while (end < ascii.length()) {
start = ascii.indexOf(startToken, end);
if (start < 0) {
@@ -203,8 +208,8 @@ public class CachedRelayDescriptorReader {
if (rdp != null) {
String digest = Hex.encodeHexString(DigestUtils.sha(
descBytes));
if (!lastImportHistory.contains(digest) &&
!currentImportHistory.contains(digest)) {
if (!lastImportHistory.contains(digest)
&& !currentImportHistory.contains(digest)) {
rdp.parse(descBytes);
parsedNum++;
} else {
@@ -215,8 +220,8 @@ public class CachedRelayDescriptorReader {
}
dumpStats.append("\n" + f.getName() + ": parsed " + parsedNum
+ ", skipped " + skippedNum + " "
+ (f.getName().startsWith("cached-descriptors") ?
"server" : "extra-info") + " descriptors");
+ (f.getName().startsWith("cached-descriptors")
? "server" : "extra-info") + " descriptors");
}
} catch (IOException e) {
logger.log(Level.WARNING, "Failed reading "

View File

@@ -1,5 +1,22 @@
/* Copyright 2016 The Tor Project
* See LICENSE for licensing information */
package org.torproject.collector.relaydescs;
import org.torproject.descriptor.Descriptor;
import org.torproject.descriptor.DescriptorFile;
import org.torproject.descriptor.DescriptorReader;
import org.torproject.descriptor.DescriptorSourceFactory;
import org.torproject.descriptor.DirSourceEntry;
import org.torproject.descriptor.ExtraInfoDescriptor;
import org.torproject.descriptor.Microdescriptor;
import org.torproject.descriptor.NetworkStatusEntry;
import org.torproject.descriptor.RelayNetworkStatusConsensus;
import org.torproject.descriptor.RelayNetworkStatusVote;
import org.torproject.descriptor.ServerDescriptor;
import com.google.gson.Gson;
import java.io.File;
import java.io.FileReader;
import java.io.FileWriter;
@@ -17,20 +34,6 @@ import java.util.TreeSet;
import java.util.logging.Level;
import java.util.logging.Logger;
import org.torproject.descriptor.Descriptor;
import org.torproject.descriptor.DescriptorFile;
import org.torproject.descriptor.DescriptorReader;
import org.torproject.descriptor.DescriptorSourceFactory;
import org.torproject.descriptor.DirSourceEntry;
import org.torproject.descriptor.ExtraInfoDescriptor;
import org.torproject.descriptor.Microdescriptor;
import org.torproject.descriptor.NetworkStatusEntry;
import org.torproject.descriptor.RelayNetworkStatusConsensus;
import org.torproject.descriptor.RelayNetworkStatusVote;
import org.torproject.descriptor.ServerDescriptor;
import com.google.gson.Gson;
public class ReferenceChecker {
private Logger log = Logger.getLogger(ReferenceChecker.class.getName());
@@ -87,8 +90,8 @@ public class ReferenceChecker {
return false;
}
Reference other = (Reference) otherObject;
return this.referencing.equals(other.referencing) &&
this.referenced.equals(other.referenced);
return this.referencing.equals(other.referencing)
&& this.referenced.equals(other.referenced);
}
@Override
@@ -168,6 +171,7 @@ public class ReferenceChecker {
}
private static DateFormat dateTimeFormat;
static {
dateTimeFormat = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss'Z'",
Locale.US);
@@ -175,9 +179,15 @@ public class ReferenceChecker {
dateTimeFormat.setTimeZone(TimeZone.getTimeZone("UTC"));
}
private static final long ONE_HOUR = 60L * 60L * 1000L,
THREE_HOURS = 3L * ONE_HOUR, SIX_HOURS = 6L * ONE_HOUR,
ONE_DAY = 24L * ONE_HOUR, THIRTY_DAYS = 30L * ONE_DAY;
private static final long ONE_HOUR = 60L * 60L * 1000L;
private static final long THREE_HOURS = 3L * ONE_HOUR;
private static final long SIX_HOURS = 6L * ONE_HOUR;
private static final long ONE_DAY = 24L * ONE_HOUR;
private static final long THIRTY_DAYS = 30L * ONE_DAY;
private void readRelayNetworkStatusConsensusUnflavored(
RelayNetworkStatusConsensus consensus) {
@@ -194,8 +204,8 @@ public class ReferenceChecker {
consensus.getValidAfterMillis() + THREE_HOURS);
}
}
double entryWeight = 200.0 /
((double) consensus.getStatusEntries().size());
double entryWeight = 200.0
/ ((double) consensus.getStatusEntries().size());
for (NetworkStatusEntry entry :
consensus.getStatusEntries().values()) {
this.addReference(referencing,
@@ -212,8 +222,8 @@ public class ReferenceChecker {
String referencing = String.format("M-%s", validAfter);
this.addReference(referencing, String.format("C-%s", validAfter), 1.0,
consensus.getValidAfterMillis() + THREE_HOURS);
double entryWeight = 200.0 /
((double) consensus.getStatusEntries().size());
double entryWeight = 200.0
/ ((double) consensus.getStatusEntries().size());
for (NetworkStatusEntry entry :
consensus.getStatusEntries().values()) {
for (String digest : entry.getMicrodescriptorDigests()) {
@@ -227,8 +237,8 @@ public class ReferenceChecker {
String validAfter = dateTimeFormat.format(vote.getValidAfterMillis());
String referencing = String.format("V-%s-%s", validAfter,
vote.getIdentity());
double entryWeight = 200.0 /
((double) vote.getStatusEntries().size());
double entryWeight = 200.0
/ ((double) vote.getStatusEntries().size());
for (NetworkStatusEntry entry : vote.getStatusEntries().values()) {
this.addReference(referencing,
String.format("S-%s", entry.getDescriptor()), entryWeight,
@ -277,8 +287,8 @@ public class ReferenceChecker {
StringBuilder sb = new StringBuilder("Missing referenced "
+ "descriptors:");
for (Reference reference : this.references) {
if (reference.referenced.length() > 0 &&
!knownDescriptors.contains(reference.referenced)) {
if (reference.referenced.length() > 0
&& !knownDescriptors.contains(reference.referenced)) {
if (!missingDescriptors.contains(reference.referenced)) {
totalMissingDescriptorsWeight += reference.weight;
}
View File
@ -1,7 +1,11 @@
/* Copyright 2010--2014 The Tor Project
/* Copyright 2010--2016 The Tor Project
* See LICENSE for licensing information */
package org.torproject.collector.relaydescs;
import org.apache.commons.codec.binary.Base64;
import org.apache.commons.codec.digest.DigestUtils;
import java.io.BufferedInputStream;
import java.io.BufferedReader;
import java.io.BufferedWriter;
@ -31,9 +35,6 @@ import java.util.logging.Level;
import java.util.logging.Logger;
import java.util.zip.InflaterInputStream;
import org.apache.commons.codec.binary.Base64;
import org.apache.commons.codec.digest.DigestUtils;
/**
* Downloads relay descriptors from the directory authorities via HTTP.
* Keeps a list of missing descriptors that gets updated by parse results
@ -224,25 +225,61 @@ public class RelayDescriptorDownloader {
* that we requested, and that we successfully downloaded in this
* execution.
*/
private int oldMissingConsensuses = 0,
oldMissingMicrodescConsensuses = 0, oldMissingVotes = 0,
oldMissingServerDescriptors = 0, oldMissingExtraInfoDescriptors = 0,
oldMissingMicrodescriptors = 0, newMissingConsensuses = 0,
newMissingMicrodescConsensuses = 0, newMissingVotes = 0,
newMissingServerDescriptors = 0, newMissingExtraInfoDescriptors = 0,
newMissingMicrodescriptors = 0, requestedConsensuses = 0,
requestedMicrodescConsensuses = 0, requestedVotes = 0,
requestedMissingServerDescriptors = 0,
requestedAllServerDescriptors = 0,
requestedMissingExtraInfoDescriptors = 0,
requestedAllExtraInfoDescriptors = 0,
requestedMissingMicrodescriptors = 0, downloadedConsensuses = 0,
downloadedMicrodescConsensuses = 0, downloadedVotes = 0,
downloadedMissingServerDescriptors = 0,
downloadedAllServerDescriptors = 0,
downloadedMissingExtraInfoDescriptors = 0,
downloadedAllExtraInfoDescriptors = 0,
downloadedMissingMicrodescriptors = 0;
private int oldMissingConsensuses = 0;
private int oldMissingMicrodescConsensuses = 0;
private int oldMissingVotes = 0;
private int oldMissingServerDescriptors = 0;
private int oldMissingExtraInfoDescriptors = 0;
private int oldMissingMicrodescriptors = 0;
private int newMissingConsensuses = 0;
private int newMissingMicrodescConsensuses = 0;
private int newMissingVotes = 0;
private int newMissingServerDescriptors = 0;
private int newMissingExtraInfoDescriptors = 0;
private int newMissingMicrodescriptors = 0;
private int requestedConsensuses = 0;
private int requestedMicrodescConsensuses = 0;
private int requestedVotes = 0;
private int requestedMissingServerDescriptors = 0;
private int requestedAllServerDescriptors = 0;
private int requestedMissingExtraInfoDescriptors = 0;
private int requestedAllExtraInfoDescriptors = 0;
private int requestedMissingMicrodescriptors = 0;
private int downloadedConsensuses = 0;
private int downloadedMicrodescConsensuses = 0;
private int downloadedVotes = 0;
private int downloadedMissingServerDescriptors = 0;
private int downloadedAllServerDescriptors = 0;
private int downloadedMissingExtraInfoDescriptors = 0;
private int downloadedAllExtraInfoDescriptors = 0;
private int downloadedMissingMicrodescriptors = 0;
/**
* Initializes this class, including reading in missing descriptors from
@ -292,8 +329,8 @@ public class RelayDescriptorDownloader {
SimpleDateFormat format = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
format.setTimeZone(TimeZone.getTimeZone("UTC"));
long now = System.currentTimeMillis();
this.currentValidAfter = format.format((now / (60L * 60L * 1000L)) *
(60L * 60L * 1000L));
this.currentValidAfter = format.format((now / (60L * 60L * 1000L))
* (60L * 60L * 1000L));
this.descriptorCutOff = format.format(now - 24L * 60L * 60L * 1000L);
this.currentTimestamp = format.format(now);
this.downloadAllDescriptorsCutOff = format.format(now
@ -317,14 +354,14 @@ public class RelayDescriptorDownloader {
while ((line = br.readLine()) != null) {
if (line.split(",").length > 2) {
String published = line.split(",")[1];
if (((line.startsWith("consensus,") ||
line.startsWith("consensus-microdesc,") ||
line.startsWith("vote,")) &&
this.currentValidAfter.equals(published)) ||
((line.startsWith("server,") ||
line.startsWith("extra,") ||
line.startsWith("micro,")) &&
this.descriptorCutOff.compareTo(published) < 0)) {
if (((line.startsWith("consensus,")
|| line.startsWith("consensus-microdesc,")
|| line.startsWith("vote,"))
&& this.currentValidAfter.equals(published))
|| ((line.startsWith("server,")
|| line.startsWith("extra,")
|| line.startsWith("micro,"))
&& this.descriptorCutOff.compareTo(published) < 0)) {
if (!line.endsWith("NA")) {
/* Not missing. */
} else if (line.startsWith("consensus,")) {
@ -352,8 +389,8 @@ public class RelayDescriptorDownloader {
}
this.microdescriptorKeys.get(microdescriptorDigest).add(
microdescriptorKey);
if (line.endsWith("NA") && !this.missingMicrodescriptors.
contains(microdescriptorDigest)) {
if (line.endsWith("NA") && !this.missingMicrodescriptors
.contains(microdescriptorDigest)) {
this.missingMicrodescriptors.add(microdescriptorDigest);
oldMissingMicrodescriptors++;
}
@ -418,8 +455,8 @@ public class RelayDescriptorDownloader {
* download all server and extra-info descriptors from. */
this.downloadAllDescriptorsFromAuthorities = new HashSet<String>();
for (String authority : this.authorities) {
if (!this.lastDownloadedAllDescriptors.containsKey(authority) ||
this.lastDownloadedAllDescriptors.get(authority).compareTo(
if (!this.lastDownloadedAllDescriptors.containsKey(authority)
|| this.lastDownloadedAllDescriptors.get(authority).compareTo(
this.downloadAllDescriptorsCutOff) < 0) {
this.downloadAllDescriptorsFromAuthorities.add(authority);
}
@ -523,8 +560,8 @@ public class RelayDescriptorDownloader {
this.microdescriptorKeys.get(microdescriptorDigest).add(
microdescriptorKey);
this.missingDescriptors.put(microdescriptorKey, parsed);
if (parsed.equals("NA") &&
!this.missingMicrodescriptors.contains(microdescriptorDigest)) {
if (parsed.equals("NA")
&& !this.missingMicrodescriptors.contains(microdescriptorDigest)) {
this.missingMicrodescriptors.add(microdescriptorDigest);
this.newMissingMicrodescriptors++;
}
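The missingMicrodescriptors hunks also show the companion convention for method chains: the dot separator moves to the head of the continuation line (checkstyle's SeparatorWrap with the dot in new-line position). Sketch with invented names:

class DotWrapExample {
  static String normalize(String digest) {
    // Old style, flagged: the '.' is stranded at the end of the line.
    // return digest.trim().
    //     toLowerCase();

    // New style: the '.' leads the continuation line.
    return digest.trim()
        .toLowerCase();
  }
}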
@ -662,8 +699,8 @@ public class RelayDescriptorDownloader {
/* Start with downloading the current consensus, unless we already
* have it. */
if (downloadCurrentConsensus) {
if (this.missingDescriptors.containsKey(consensusKey) &&
this.missingDescriptors.get(consensusKey).equals("NA")) {
if (this.missingDescriptors.containsKey(consensusKey)
&& this.missingDescriptors.get(consensusKey).equals("NA")) {
this.requestedConsensuses++;
this.downloadedConsensuses +=
this.downloadResourceFromAuthority(authority,
@ -673,10 +710,9 @@ public class RelayDescriptorDownloader {
/* Then try to download the microdesc consensus. */
if (downloadCurrentMicrodescConsensus) {
if (this.missingDescriptors.containsKey(
microdescConsensusKey) &&
this.missingDescriptors.get(microdescConsensusKey).
equals("NA")) {
if (this.missingDescriptors.containsKey(microdescConsensusKey)
&& this.missingDescriptors.get(microdescConsensusKey)
.equals("NA")) {
this.requestedMicrodescConsensuses++;
this.downloadedMicrodescConsensuses +=
this.downloadResourceFromAuthority(authority,
@ -690,8 +726,8 @@ public class RelayDescriptorDownloader {
SortedSet<String> fingerprints = new TreeSet<String>();
for (Map.Entry<String, String> e :
this.missingDescriptors.entrySet()) {
if (e.getValue().equals("NA") &&
e.getKey().startsWith(voteKeyPrefix)) {
if (e.getValue().equals("NA")
&& e.getKey().startsWith(voteKeyPrefix)) {
String fingerprint = e.getKey().split(",")[2];
fingerprints.add(fingerprint);
}
@ -714,9 +750,9 @@ public class RelayDescriptorDownloader {
* authority if we haven't done so for 24 hours and if we're
* configured to do so. */
if (this.downloadAllDescriptorsFromAuthorities.contains(
authority) && ((type.equals("server") &&
this.downloadAllServerDescriptors) ||
(type.equals("extra") && this.downloadAllExtraInfos))) {
authority) && ((type.equals("server")
&& this.downloadAllServerDescriptors)
|| (type.equals("extra") && this.downloadAllExtraInfos))) {
int downloadedAllDescriptors =
this.downloadResourceFromAuthority(authority, "/tor/"
+ type + "/all");
@ -732,11 +768,11 @@ public class RelayDescriptorDownloader {
/* Download missing server descriptors, extra-info descriptors,
* and microdescriptors if we're configured to do so. */
} else if ((type.equals("server") &&
this.downloadMissingServerDescriptors) ||
(type.equals("extra") && this.downloadMissingExtraInfos) ||
(type.equals("micro") &&
this.downloadMissingMicrodescriptors)) {
} else if ((type.equals("server")
&& this.downloadMissingServerDescriptors)
|| (type.equals("extra") && this.downloadMissingExtraInfos)
|| (type.equals("micro")
&& this.downloadMissingMicrodescriptors)) {
/* Go through the list of missing descriptors of this type
* and combine the descriptor identifiers to a URL of up to
@ -746,23 +782,24 @@ public class RelayDescriptorDownloader {
new TreeSet<String>();
for (Map.Entry<String, String> e :
this.missingDescriptors.entrySet()) {
if (e.getValue().equals("NA") &&
e.getKey().startsWith(type + ",") &&
this.descriptorCutOff.compareTo(
if (e.getValue().equals("NA")
&& e.getKey().startsWith(type + ",")
&& this.descriptorCutOff.compareTo(
e.getKey().split(",")[1]) < 0) {
String descriptorIdentifier = e.getKey().split(",")[3];
descriptorIdentifiers.add(descriptorIdentifier);
}
}
StringBuilder combinedResource = null;
int descriptorsInCombinedResource = 0,
requestedDescriptors = 0, downloadedDescriptors = 0;
int descriptorsInCombinedResource = 0;
int requestedDescriptors = 0;
int downloadedDescriptors = 0;
int maxDescriptorsInCombinedResource =
type.equals("micro") ? 92 : 96;
String separator = type.equals("micro") ? "-" : "+";
for (String descriptorIdentifier : descriptorIdentifiers) {
if (descriptorsInCombinedResource >=
maxDescriptorsInCombinedResource) {
if (descriptorsInCombinedResource
>= maxDescriptorsInCombinedResource) {
requestedDescriptors += descriptorsInCombinedResource;
downloadedDescriptors +=
this.downloadResourceFromAuthority(authority,
@ -836,8 +873,8 @@ public class RelayDescriptorDownloader {
huc.connect();
int response = huc.getResponseCode();
if (response == 200) {
BufferedInputStream in = this.downloadCompressed &&
!resource.startsWith("/tor/extra/")
BufferedInputStream in = this.downloadCompressed
&& !resource.startsWith("/tor/extra/")
? new BufferedInputStream(new InflaterInputStream(
huc.getInputStream()))
: new BufferedInputStream(huc.getInputStream());
@ -857,10 +894,10 @@ public class RelayDescriptorDownloader {
if (resource.startsWith("/tor/status-vote/current/")) {
this.rdp.parse(allData);
receivedDescriptors = 1;
} else if (resource.startsWith("/tor/server/") ||
resource.startsWith("/tor/extra/")) {
if (resource.equals("/tor/server/all") ||
resource.equals("/tor/extra/all")) {
} else if (resource.startsWith("/tor/server/")
|| resource.startsWith("/tor/extra/")) {
if (resource.equals("/tor/server/all")
|| resource.equals("/tor/extra/all")) {
this.lastDownloadedAllDescriptors.put(authority,
this.currentTimestamp);
}
@ -870,9 +907,11 @@ public class RelayDescriptorDownloader {
} catch (UnsupportedEncodingException e) {
/* No way that US-ASCII is not supported. */
}
int start = -1, sig = -1, end = -1;
String startToken = resource.startsWith("/tor/server/") ?
"router " : "extra-info ";
int start = -1;
int sig = -1;
int end = -1;
String startToken = resource.startsWith("/tor/server/")
? "router " : "extra-info ";
String sigToken = "\nrouter-signature\n";
String endToken = "\n-----END SIGNATURE-----\n";
while (end < ascii.length()) {
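The startToken change extends the same wrapping convention to the ternary operator: '?' and ':' begin continuation lines instead of closing them. A sketch modeled on the hunk above:

class TernaryWrapExample {
  static String startToken(String resource) {
    // Old style, flagged: '?' ends the first line.
    // return resource.startsWith("/tor/server/") ?
    //     "router " : "extra-info ";

    // New style: the '?' opens the continuation line.
    return resource.startsWith("/tor/server/")
        ? "router " : "extra-info ";
  }
}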
@ -910,7 +949,8 @@ public class RelayDescriptorDownloader {
} catch (UnsupportedEncodingException e) {
/* No way that US-ASCII is not supported. */
}
int start = -1, end = -1;
int start = -1;
int end = -1;
String startToken = "onion-key\n";
while (end < ascii.length()) {
start = ascii.indexOf(startToken, end);
@ -961,9 +1001,11 @@ public class RelayDescriptorDownloader {
public void writeFile() {
/* Write missing descriptors file to disk. */
int missingConsensuses = 0, missingMicrodescConsensuses = 0,
missingVotes = 0, missingServerDescriptors = 0,
missingExtraInfoDescriptors = 0;
int missingConsensuses = 0;
int missingMicrodescConsensuses = 0;
int missingVotes = 0;
int missingServerDescriptors = 0;
int missingExtraInfoDescriptors = 0;
try {
this.logger.fine("Writing file "
+ this.missingDescriptorsFile.getAbsolutePath() + "...");
@ -972,7 +1014,8 @@ public class RelayDescriptorDownloader {
this.missingDescriptorsFile));
for (Map.Entry<String, String> e :
this.missingDescriptors.entrySet()) {
String key = e.getKey(), value = e.getValue();
String key = e.getKey();
String value = e.getValue();
if (!value.equals("NA")) {
/* Not missing. */
} else if (key.startsWith("consensus,")) {
@ -986,6 +1029,7 @@ public class RelayDescriptorDownloader {
} else if (key.startsWith("extra,")) {
missingExtraInfoDescriptors++;
} else if (key.startsWith("micro,")) {
/* We're counting missing microdescriptors below. */
}
bw.write(key + "," + value + "\n");
}
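The comment added to the micro branch above is presumably for checkstyle's EmptyBlock check, which accepts an otherwise empty block only when it contains text explaining why nothing happens there. Sketch:

class EmptyBlockExample {
  static void count(String key) {
    if (key.startsWith("micro,")) {
      /* Nothing to do here; a comment like this satisfies EmptyBlock. */
    }
  }
}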
@ -1059,7 +1103,7 @@ public class RelayDescriptorDownloader {
StringBuilder sb = new StringBuilder();
for (String authority : this.authorities) {
sb.append(" " + authority + "="
+ this.requestsByAuthority.get(authority));
+ this.requestsByAuthority.get(authority));
}
this.logger.info("We sent these numbers of requests to the directory "
+ "authorities:" + sb.toString());
View File
@ -1,7 +1,12 @@
/* Copyright 2010--2014 The Tor Project
/* Copyright 2010--2016 The Tor Project
* See LICENSE for licensing information */
package org.torproject.collector.relaydescs;
import org.apache.commons.codec.binary.Base64;
import org.apache.commons.codec.binary.Hex;
import org.apache.commons.codec.digest.DigestUtils;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.StringReader;
@ -13,10 +18,6 @@ import java.util.TreeSet;
import java.util.logging.Level;
import java.util.logging.Logger;
import org.apache.commons.codec.binary.Base64;
import org.apache.commons.codec.binary.Hex;
import org.apache.commons.codec.digest.DigestUtils;
/**
* Parses relay descriptors including network status consensuses and
* votes, server and extra-info descriptors, and passes the results to the
@ -93,9 +94,11 @@ public class RelayDescriptorParser {
if (line.equals("network-status-version 3 microdesc")) {
statusType = "consensus-microdesc";
}
String validAfterTime = null, fingerprint = null,
dirSource = null;
long validAfter = -1L, dirKeyPublished = -1L;
String validAfterTime = null;
String fingerprint = null;
String dirSource = null;
long validAfter = -1L;
long dirKeyPublished = -1L;
SortedSet<String> dirSources = new TreeSet<String>();
SortedSet<String> serverDescriptors = new TreeSet<String>();
SortedSet<String> serverDescriptorDigests = new TreeSet<String>();
@ -130,8 +133,8 @@ public class RelayDescriptorParser {
} else if (line.startsWith("dir-key-published ")) {
String dirKeyPublishedTime = line.substring(
"dir-key-published ".length());
dirKeyPublished = parseFormat.parse(dirKeyPublishedTime).
getTime();
dirKeyPublished = parseFormat.parse(dirKeyPublishedTime)
.getTime();
} else if (line.startsWith("r ")) {
String[] parts = line.split(" ");
if (parts.length == 8) {
@ -158,12 +161,12 @@ public class RelayDescriptorParser {
microdescriptorKeys.add(validAfterTime + ","
+ lastRelayIdentity + "," + digest256Base64);
String digest256Hex = Hex.encodeHexString(
Base64.decodeBase64(digest256Base64 + "=")).
toLowerCase();
Base64.decodeBase64(digest256Base64 + "="))
.toLowerCase();
microdescriptorDigests.add(digest256Hex);
} else if (parts.length != 3 ||
!parts[2].startsWith("sha256=") ||
parts[2].length() != 50) {
} else if (parts.length != 3
|| !parts[2].startsWith("sha256=")
|| parts[2].length() != 50) {
this.logger.log(Level.WARNING, "Could not parse m line '"
+ line + "' in descriptor. Skipping.");
break;
@ -226,23 +229,24 @@ public class RelayDescriptorParser {
}
}
} else if (line.startsWith("router ")) {
String publishedTime = null, extraInfoDigest = null,
relayIdentifier = null;
String publishedTime = null;
String extraInfoDigest = null;
String relayIdentifier = null;
long published = -1L;
while ((line = br.readLine()) != null) {
if (line.startsWith("published ")) {
publishedTime = line.substring("published ".length());
published = parseFormat.parse(publishedTime).getTime();
} else if (line.startsWith("opt fingerprint") ||
line.startsWith("fingerprint")) {
relayIdentifier = line.substring(line.startsWith("opt ") ?
"opt fingerprint".length() : "fingerprint".length()).
replaceAll(" ", "").toLowerCase();
} else if (line.startsWith("opt extra-info-digest ") ||
line.startsWith("extra-info-digest ")) {
extraInfoDigest = line.startsWith("opt ") ?
line.split(" ")[2].toLowerCase() :
line.split(" ")[1].toLowerCase();
} else if (line.startsWith("opt fingerprint")
|| line.startsWith("fingerprint")) {
relayIdentifier = line.substring(line.startsWith("opt ")
? "opt fingerprint".length() : "fingerprint".length())
.replaceAll(" ", "").toLowerCase();
} else if (line.startsWith("opt extra-info-digest ")
|| line.startsWith("extra-info-digest ")) {
extraInfoDigest = line.startsWith("opt ")
? line.split(" ")[2].toLowerCase()
: line.split(" ")[1].toLowerCase();
}
}
String ascii = new String(data, "US-ASCII");
@ -266,7 +270,8 @@ public class RelayDescriptorParser {
relayIdentifier, digest, extraInfoDigest);
}
} else if (line.startsWith("extra-info ")) {
String publishedTime = null, relayIdentifier = line.split(" ")[2];
String publishedTime = null;
String relayIdentifier = line.split(" ")[2];
long published = -1L;
while ((line = br.readLine()) != null) {
if (line.startsWith("published ")) {
View File
@ -1,7 +1,12 @@
/* Copyright 2012-2016 The Tor Project
* See LICENSE for licensing information */
package org.torproject.collector.torperf;
import org.torproject.collector.main.Configuration;
import org.torproject.collector.main.LockFile;
import org.torproject.collector.main.LoggingConfiguration;
import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.File;
@ -22,10 +27,6 @@ import java.util.TreeMap;
import java.util.logging.Level;
import java.util.logging.Logger;
import org.torproject.collector.main.Configuration;
import org.torproject.collector.main.LockFile;
import org.torproject.collector.main.LoggingConfiguration;
/* Download possibly truncated Torperf .data and .extradata files from
* configured sources, append them to the files we already have, and merge
* the two files into the .tpf format. */
@ -97,8 +98,10 @@ public class TorperfDownloader extends Thread {
private File torperfLastMergedFile =
new File("stats/torperf-last-merged");
SortedMap<String, String> lastMergedTimestamps =
new TreeMap<String, String>();
private void readLastMergedTimestamps() {
if (!this.torperfLastMergedFile.exists()) {
return;
@ -109,7 +112,8 @@ public class TorperfDownloader extends Thread {
String line;
while ((line = br.readLine()) != null) {
String[] parts = line.split(" ");
String fileName = null, timestamp = null;
String fileName = null;
String timestamp = null;
if (parts.length == 2) {
try {
Double.parseDouble(parts[1]);
@ -315,11 +319,14 @@ public class TorperfDownloader extends Thread {
}
this.logger.fine("Merging " + dataFile.getAbsolutePath() + " and "
+ extradataFile.getAbsolutePath() + " into .tpf format.");
BufferedReader brD = new BufferedReader(new FileReader(dataFile)),
brE = new BufferedReader(new FileReader(extradataFile));
String lineD = brD.readLine(), lineE = brE.readLine();
int d = 1, e = 1;
String maxDataComplete = null, maxUsedAt = null;
BufferedReader brD = new BufferedReader(new FileReader(dataFile));
BufferedReader brE = new BufferedReader(new FileReader(extradataFile));
String lineD = brD.readLine();
String lineE = brE.readLine();
int d = 1;
int e = 1;
String maxDataComplete = null;
String maxUsedAt = null;
while (lineD != null) {
/* Parse .data line. Every valid .data line will go into the .tpf
@ -363,8 +370,8 @@ public class TorperfDownloader extends Thread {
+ e++ + " which is a BUILDTIMEOUT_SET line.");
lineE = brE.readLine();
continue;
} else if (lineE.startsWith("ok ") ||
lineE.startsWith("error ")) {
} else if (lineE.startsWith("ok ")
|| lineE.startsWith("error ")) {
this.logger.finer("Skipping " + extradataFile.getName() + ":"
+ e++ + " which is in the old format.");
lineE = brE.readLine();
@ -446,6 +453,7 @@ public class TorperfDownloader extends Thread {
}
private SortedMap<Integer, String> dataTimestamps;
private SortedMap<String, String> parseDataLine(String line) {
String[] parts = line.trim().split(" ");
if (line.length() == 0 || parts.length < 20) {
@ -504,18 +512,18 @@ public class TorperfDownloader extends Thread {
String key = keyAndValue[0];
previousKey = key;
String value = keyAndValue[1];
if (value.contains(".") && value.lastIndexOf(".") ==
value.length() - 2) {
if (value.contains(".") && value.lastIndexOf(".")
== value.length() - 2) {
/* Make sure that all floats have two trailing digits. */
value += "0";
}
extradata.put(key, value);
} else if (keyAndValue.length == 1 && previousKey != null) {
String value = keyAndValue[0];
if (previousKey.equals("STREAM_FAIL_REASONS") &&
(value.equals("MISC") || value.equals("EXITPOLICY") ||
value.equals("RESOURCELIMIT") ||
value.equals("RESOLVEFAILED"))) {
if (previousKey.equals("STREAM_FAIL_REASONS")
&& (value.equals("MISC") || value.equals("EXITPOLICY")
|| value.equals("RESOURCELIMIT")
|| value.equals("RESOLVEFAILED"))) {
extradata.put(previousKey, extradata.get(previousKey) + ":"
+ value);
} else {
@ -529,9 +537,13 @@ public class TorperfDownloader extends Thread {
}
private String cachedSource;
private int cachedFileSize;
private String cachedStartDate;
private SortedMap<String, String> cachedTpfLines;
private void writeTpfLine(String source, int fileSize,
SortedMap<String, String> keysAndValues) throws IOException {
StringBuilder sb = new StringBuilder();
@ -547,14 +559,14 @@ public class TorperfDownloader extends Thread {
long startMillis = Long.parseLong(startString.substring(0,
startString.indexOf("."))) * 1000L;
String startDate = dateFormat.format(startMillis);
if (this.cachedTpfLines == null || !source.equals(this.cachedSource) ||
fileSize != this.cachedFileSize ||
!startDate.equals(this.cachedStartDate)) {
if (this.cachedTpfLines == null || !source.equals(this.cachedSource)
|| fileSize != this.cachedFileSize
|| !startDate.equals(this.cachedStartDate)) {
this.writeCachedTpfLines();
this.readTpfLinesToCache(source, fileSize, startDate);
}
if (!this.cachedTpfLines.containsKey(startString) ||
line.length() > this.cachedTpfLines.get(startString).length()) {
if (!this.cachedTpfLines.containsKey(startString)
|| line.length() > this.cachedTpfLines.get(startString).length()) {
this.cachedTpfLines.put(startString, line);
}
}
@ -588,8 +600,8 @@ public class TorperfDownloader extends Thread {
}
private void writeCachedTpfLines() throws IOException {
if (this.cachedSource == null || this.cachedFileSize == 0 ||
this.cachedStartDate == null || this.cachedTpfLines == null) {
if (this.cachedSource == null || this.cachedFileSize == 0
|| this.cachedStartDate == null || this.cachedTpfLines == null) {
return;
}
File tarballFile = new File(torperfOutputDirectory,