mirror of https://github.com/torproject/collector.git

commit ae4be6f06b
parent 4383ae9386

    Checkstyle warnings down to zero.
@@ -16,12 +16,14 @@ public class BridgeDescriptorParser {
 
   private Logger logger;
 
+  @SuppressWarnings("checkstyle:javadocmethod")
   public BridgeDescriptorParser(SanitizedBridgesWriter sbw) {
     this.sbw = sbw;
     this.logger =
         LoggerFactory.getLogger(BridgeDescriptorParser.class);
   }
 
+  @SuppressWarnings("checkstyle:javadocmethod")
   public void parse(byte[] allData, String dateTime) {
     try {
       BufferedReader br = new BufferedReader(new StringReader(
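Most hunks in this commit follow the pattern visible above: checkstyle's JavadocMethod check flags public methods and constructors that lack Javadoc, and the commit suppresses the check per method rather than writing comments. A minimal sketch of the two options, assuming the build wires up checkstyle's SuppressWarningsHolder and SuppressWarningsFilter (which is what makes the annotation take effect); the class and method names below are hypothetical:

    // Hypothetical example; not part of this commit.
    public class JavadocMethodOptions {

      /** Option 1: write the missing Javadoc. */
      public void documented(byte[] data) {
        // ...
      }

      // Option 2: suppress the check per method, as this commit does.
      // Without SuppressWarningsFilter in the checkstyle configuration,
      // this annotation would be ignored by checkstyle.
      @SuppressWarnings("checkstyle:javadocmethod")
      public void suppressed(byte[] data) {
        // ...
      }
    }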
@@ -27,10 +27,11 @@ import java.util.SortedSet;
 import java.util.Stack;
 import java.util.TreeSet;
 
 /**
  * Reads the half-hourly snapshots of bridge descriptors from Tonga.
  */
 public class BridgeSnapshotReader {
 
+  /**
+   * Reads the half-hourly snapshots of bridge descriptors from Tonga.
+   */
   public BridgeSnapshotReader(BridgeDescriptorParser bdp,
       File bridgeDirectoriesDir, File statsDirectory) {
 
@@ -54,6 +54,7 @@ public class SanitizedBridgesWriter extends Thread {
 
   private static Logger logger = LoggerFactory.getLogger(SanitizedBridgesWriter.class);
 
+  @SuppressWarnings("checkstyle:javadocmethod")
   public static void main(Configuration config) throws ConfigurationException {
 
     logger.info("Starting bridge-descriptors module of CollecTor.");
@@ -89,11 +90,11 @@ public class SanitizedBridgesWriter extends Thread {
    */
   private File sanitizedBridgesDirectory;
 
-  private boolean replaceIPAddressesWithHashes;
+  private boolean replaceIpAddressesWithHashes;
 
   private boolean persistenceProblemWithSecrets;
 
-  private SortedMap<String, byte[]> secretsForHashingIPAddresses;
+  private SortedMap<String, byte[]> secretsForHashingIpAddresses;
 
   private String bridgeSanitizingCutOffTimestamp;
 
@@ -119,10 +120,6 @@ public class SanitizedBridgesWriter extends Thread {
         config.getPath(Key.BridgeSnapshotsDirectory).toFile();
     File sanitizedBridgesDirectory =
         config.getPath(Key.SanitizedBridgesWriteDirectory).toFile();
-    boolean replaceIPAddressesWithHashes =
-        config.getBool(Key.ReplaceIPAddressesWithHashes);
-    long limitBridgeSanitizingInterval =
-        config.getInt(Key.BridgeDescriptorMappingsLimit);
     File statsDirectory = new File("stats");
 
     if (bridgeDirectoriesDirectory == null
@@ -133,8 +130,8 @@ public class SanitizedBridgesWriter extends Thread {
     /* Memorize argument values. */
     this.bridgeDirectoriesDirectory = bridgeDirectoriesDirectory;
     this.sanitizedBridgesDirectory = sanitizedBridgesDirectory;
-    this.replaceIPAddressesWithHashes = replaceIPAddressesWithHashes;
-
+    this.replaceIpAddressesWithHashes =
+        config.getBool(Key.ReplaceIpAddressesWithHashes);
     SimpleDateFormat rsyncCatFormat = new SimpleDateFormat(
         "yyyy-MM-dd-HH-mm-ss");
     rsyncCatFormat.setTimeZone(TimeZone.getTimeZone("UTC"));
@@ -142,7 +139,7 @@ public class SanitizedBridgesWriter extends Thread {
         System.currentTimeMillis());
 
     /* Initialize secure random number generator if we need it. */
-    if (this.replaceIPAddressesWithHashes) {
+    if (this.replaceIpAddressesWithHashes) {
       try {
         this.secureRandom = SecureRandom.getInstance("SHA1PRNG", "SUN");
       } catch (GeneralSecurityException e) {
@@ -155,7 +152,7 @@ public class SanitizedBridgesWriter extends Thread {
 
     /* Read hex-encoded secrets for replacing IP addresses with hashes
      * from disk. */
-    this.secretsForHashingIPAddresses = new TreeMap<String, byte[]>();
+    this.secretsForHashingIpAddresses = new TreeMap<String, byte[]>();
     this.bridgeIpSecretsFile = new File(statsDirectory,
         "bridge-ip-secrets");
     if (this.bridgeIpSecretsFile.exists()) {
@@ -177,12 +174,12 @@ public class SanitizedBridgesWriter extends Thread {
           }
           String month = parts[0];
           byte[] secret = Hex.decodeHex(parts[1].toCharArray());
-          this.secretsForHashingIPAddresses.put(month, secret);
+          this.secretsForHashingIpAddresses.put(month, secret);
         }
         br.close();
         if (!this.persistenceProblemWithSecrets) {
           this.logger.debug("Read "
-              + this.secretsForHashingIPAddresses.size() + " secrets for "
+              + this.secretsForHashingIpAddresses.size() + " secrets for "
               + "hashing bridge IP addresses.");
         }
       } catch (DecoderException e) {
@@ -198,6 +195,9 @@ public class SanitizedBridgesWriter extends Thread {
       }
     }
 
+    long limitBridgeSanitizingInterval =
+        config.getInt(Key.BridgeDescriptorMappingsLimit);
+
     /* If we're configured to keep secrets only for a limited time, define
      * the cut-off day and time. */
     if (limitBridgeSanitizingInterval >= 0L) {
@@ -249,7 +249,7 @@ public class SanitizedBridgesWriter extends Thread {
 
   private String scrubIpv4Address(String address, byte[] fingerprintBytes,
       String published) throws IOException {
-    if (this.replaceIPAddressesWithHashes) {
+    if (this.replaceIpAddressesWithHashes) {
       if (this.persistenceProblemWithSecrets) {
         /* There's a persistence problem, so we shouldn't scrub more IP
          * addresses in this execution. */
@@ -278,13 +278,12 @@ public class SanitizedBridgesWriter extends Thread {
   private String scrubIpv6Address(String address, byte[] fingerprintBytes,
       String published) throws IOException {
     StringBuilder sb = new StringBuilder("[fd9f:2e19:3bcf::");
-    if (this.replaceIPAddressesWithHashes) {
+    if (this.replaceIpAddressesWithHashes) {
       if (this.persistenceProblemWithSecrets) {
         /* There's a persistence problem, so we shouldn't scrub more IP
          * addresses in this execution. */
         return null;
       }
-      byte[] hashInput = new byte[16 + 20 + 19];
       String[] doubleColonSeparatedParts = address.substring(1,
           address.length() - 1).split("::", -1);
       if (doubleColonSeparatedParts.length > 2) {
@@ -344,6 +343,7 @@ public class SanitizedBridgesWriter extends Thread {
         /* TODO Invalid IPv6 address. */
         return null;
       }
+      byte[] hashInput = new byte[16 + 20 + 19];
       System.arraycopy(ipBytes, 0, hashInput, 0, 16);
       System.arraycopy(fingerprintBytes, 0, hashInput, 16, 20);
       String month = published.substring(0, "yyyy-MM".length());
@@ -360,12 +360,12 @@ public class SanitizedBridgesWriter extends Thread {
   }
 
   private byte[] getSecretForMonth(String month) throws IOException {
-    if (!this.secretsForHashingIPAddresses.containsKey(month)
-        || this.secretsForHashingIPAddresses.get(month).length == 31) {
+    if (!this.secretsForHashingIpAddresses.containsKey(month)
+        || this.secretsForHashingIpAddresses.get(month).length == 31) {
       byte[] secret = new byte[50];
       this.secureRandom.nextBytes(secret);
-      if (this.secretsForHashingIPAddresses.containsKey(month)) {
-        System.arraycopy(this.secretsForHashingIPAddresses.get(month), 0,
+      if (this.secretsForHashingIpAddresses.containsKey(month)) {
+        System.arraycopy(this.secretsForHashingIpAddresses.get(month), 0,
             secret, 0, 31);
       }
       if (month.compareTo(
@@ -393,9 +393,9 @@ public class SanitizedBridgesWriter extends Thread {
           throw new IOException(e);
         }
       }
-      this.secretsForHashingIPAddresses.put(month, secret);
+      this.secretsForHashingIpAddresses.put(month, secret);
     }
-    return this.secretsForHashingIPAddresses.get(month);
+    return this.secretsForHashingIpAddresses.get(month);
   }
 
   private String maxNetworkStatusPublishedTime = "1970-01-01 00:00:00";
@@ -467,9 +467,7 @@ public class SanitizedBridgesWriter extends Thread {
 
         /* Parse the relevant parts of this r line. */
         String[] parts = line.split(" ");
-        String nickname = parts[1];
         fingerprintBytes = Base64.decodeBase64(parts[2] + "==");
-        String descriptorIdentifier = parts[3];
         descPublicationTime = parts[4] + " " + parts[5];
         String address = parts[6];
         String orPort = parts[7];
@@ -489,12 +487,14 @@ public class SanitizedBridgesWriter extends Thread {
             hashedBridgeIdentity).substring(0, 27);
         hashedBridgeIdentityHex = Hex.encodeHexString(
             hashedBridgeIdentity);
+        String descriptorIdentifier = parts[3];
         String hashedDescriptorIdentifier = Base64.encodeBase64String(
             DigestUtils.sha(Base64.decodeBase64(descriptorIdentifier
             + "=="))).substring(0, 27);
         String scrubbedAddress = scrubIpv4Address(address,
             fingerprintBytes,
             descPublicationTime);
+        String nickname = parts[1];
         scrubbed.append("r " + nickname + " "
             + hashedBridgeIdentityBase64 + " "
             + hashedDescriptorIdentifier + " " + descPublicationTime
@@ -1242,8 +1242,8 @@ public class SanitizedBridgesWriter extends Thread {
   public void finishWriting() {
 
     /* Delete secrets that we don't need anymore. */
-    if (!this.secretsForHashingIPAddresses.isEmpty()
-        && this.secretsForHashingIPAddresses.firstKey().compareTo(
+    if (!this.secretsForHashingIpAddresses.isEmpty()
+        && this.secretsForHashingIpAddresses.firstKey().compareTo(
         this.bridgeSanitizingCutOffTimestamp) < 0) {
       try {
         int kept = 0;
@@ -1251,7 +1251,7 @@ public class SanitizedBridgesWriter extends Thread {
         BufferedWriter bw = new BufferedWriter(new FileWriter(
             this.bridgeIpSecretsFile));
         for (Map.Entry<String, byte[]> e :
-            this.secretsForHashingIPAddresses.entrySet()) {
+            this.secretsForHashingIpAddresses.entrySet()) {
           if (e.getKey().compareTo(
               this.bridgeSanitizingCutOffTimestamp) < 0) {
             deleted++;
@@ -1310,7 +1310,7 @@ public class SanitizedBridgesWriter extends Thread {
     }
   }
 
-  /* Delete all files from the rsync directory that have not been modified
+  /** Delete all files from the rsync directory that have not been modified
    * in the last three days, and remove the .tmp extension from newly
    * written files. */
   public void cleanUpRsyncDirectory() {
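For orientation, the scrubbing that scrubIpv4Address guards with replaceIpAddressesWithHashes is documented in collector.properties further down: the sanitized address is 10.x.y.z where x.y.z = H(IP address | bridge identity | secret)[:3]. A minimal sketch of that computation, assuming SHA-256 for H and simplified names; the real method additionally handles persistence problems and fetches the per-month secret via getSecretForMonth:

    // Illustrative sketch only; simplified from scrubIpv4Address.
    import java.net.InetAddress;
    import java.security.MessageDigest;

    public class ScrubSketch {

      static String scrubIpv4(String address, byte[] fingerprintBytes,
          byte[] secret) throws Exception {
        // Build H's input: IP address | bridge identity | secret.
        byte[] ipBytes = InetAddress.getByName(address).getAddress();
        byte[] hashInput =
            new byte[ipBytes.length + fingerprintBytes.length + secret.length];
        System.arraycopy(ipBytes, 0, hashInput, 0, ipBytes.length);
        System.arraycopy(fingerprintBytes, 0, hashInput, ipBytes.length,
            fingerprintBytes.length);
        System.arraycopy(secret, 0, hashInput,
            ipBytes.length + fingerprintBytes.length, secret.length);
        byte[] hash = MessageDigest.getInstance("SHA-256").digest(hashInput);
        // Keep only the first three hash bytes as x.y.z of 10.x.y.z.
        return String.format("10.%d.%d.%d",
            hash[0] & 0xff, hash[1] & 0xff, hash[2] & 0xff);
      }
    }

Because getSecretForMonth rotates the secret monthly, the same bridge IP maps to different 10.x.y.z values in different months, which limits how far a single leaked secret can be used to reverse the mapping.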
@@ -32,7 +32,7 @@ public enum Key {
   ImportCachedRelayDescriptors(Boolean.class),
   ImportDirectoryArchives(Boolean.class),
   KeepDirectoryArchiveImportHistory(Boolean.class),
-  ReplaceIPAddressesWithHashes(Boolean.class),
+  ReplaceIpAddressesWithHashes(Boolean.class),
   BridgeDescriptorMappingsLimit(Integer.class),
   SanitizedBridgesWriteDirectory(Path.class),
   TorperfOutputDirectory(Path.class),
@@ -36,6 +36,7 @@ public class ExitListDownloader extends Thread {
 
   private static Logger logger = LoggerFactory.getLogger(ExitListDownloader.class);
 
+  @SuppressWarnings("checkstyle:javadocmethod")
   public static void main(Configuration config) throws ConfigurationException {
     logger.info("Starting exit-lists module of CollecTor.");
 
@@ -54,6 +55,7 @@ public class ExitListDownloader extends Thread {
 
   public ExitListDownloader(Configuration config) {}
 
+  @Override
   public void run() {
     try {
       startProcessing();
@@ -79,8 +81,8 @@ public class ExitListDownloader extends Thread {
           + "\n");
       String exitAddressesUrl =
           "http://exitlist.torproject.org/exit-addresses";
-      URL u = new URL(exitAddressesUrl);
-      HttpURLConnection huc = (HttpURLConnection) u.openConnection();
+      URL url = new URL(exitAddressesUrl);
+      HttpURLConnection huc = (HttpURLConnection) url.openConnection();
       huc.setRequestMethod("GET");
       huc.connect();
       int response = huc.getResponseCode();
@@ -194,7 +196,7 @@ public class ExitListDownloader extends Thread {
     this.cleanUpRsyncDirectory();
   }
 
-  /* Delete all files from the rsync directory that have not been modified
+  /** Delete all files from the rsync directory that have not been modified
    * in the last three days. */
   public void cleanUpRsyncDirectory() {
     long cutOffMillis = System.currentTimeMillis()
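The cleanup contract in the comment above (and its twins in SanitizedBridgesWriter, ArchiveWriter, and TorperfDownloader) is simple: walk the rsync directory and delete anything not modified in the last three days. A sketch under those assumptions; the directory argument and the omitted .tmp handling are simplifications, not the module's exact code:

    // Illustrative sketch of the documented cleanup behavior.
    import java.io.File;
    import java.util.Stack;

    public class RsyncCleanupSketch {

      public static void cleanUpRsyncDirectory(File rsyncDirectory) {
        long cutOffMillis = System.currentTimeMillis()
            - 3L * 24L * 60L * 60L * 1000L;
        Stack<File> files = new Stack<File>();
        files.add(rsyncDirectory);
        while (!files.isEmpty()) {
          File file = files.pop();
          File[] children = file.listFiles();
          if (children != null) {          // a directory: descend into it
            for (File child : children) {
              files.add(child);
            }
          } else if (file.lastModified() < cutOffMillis) {
            file.delete();                 // a stale file: remove it
          }
        }
      }
    }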
@@ -52,6 +52,7 @@ public class CreateIndexJson {
 
   static final TimeZone dateTimezone = TimeZone.getTimeZone("UTC");
 
+  @SuppressWarnings("checkstyle:javadocmethod")
   public static void main(Configuration config)
       throws ConfigurationException, IOException {
     indexJsonFile = new File(config.getPath(Key.IndexPath).toFile(), "index.json");
@@ -73,11 +74,12 @@ public class CreateIndexJson {
       this.directories = directories;
     }
 
-    public int compareTo(DirectoryNode o) {
-      return this.path.compareTo(o.path);
+    public int compareTo(DirectoryNode other) {
+      return this.path.compareTo(other.path);
     }
   }
 
+  @SuppressWarnings({"checkstyle:membername", "checkstyle:parametername"})
   static class IndexNode {
     String index_created;
     String path;
@@ -94,6 +96,7 @@ public class CreateIndexJson {
     }
   }
 
+  @SuppressWarnings({"checkstyle:membername", "checkstyle:parametername"})
   static class FileNode implements Comparable<FileNode> {
     String path;
     long size;
@@ -105,8 +108,8 @@ public class CreateIndexJson {
       this.last_modified = last_modified;
     }
 
-    public int compareTo(FileNode o) {
-      return this.path.compareTo(o.path);
+    public int compareTo(FileNode other) {
+      return this.path.compareTo(other.path);
     }
   }
 
@@ -28,6 +28,7 @@ public class LockFile {
     this.moduleName = moduleName;
   }
 
+  @SuppressWarnings("checkstyle:javadocmethod")
   public boolean acquireLock() {
     this.logger.debug("Trying to acquire lock...");
     try {
@@ -53,6 +54,7 @@ public class LockFile {
     }
   }
 
+  @SuppressWarnings("checkstyle:javadocmethod")
   public void releaseLock() {
     this.logger.debug("Releasing lock...");
     this.lockFile.delete();
@@ -43,6 +43,7 @@ public class ArchiveReader {
   private Map<String, Set<String>> microdescriptorValidAfterTimes =
       new HashMap<String, Set<String>>();
 
+  @SuppressWarnings("checkstyle:javadocmethod")
   public ArchiveReader(RelayDescriptorParser rdp, File archivesDirectory,
       File statsDirectory, boolean keepImportHistory) {
 
@@ -271,6 +272,7 @@ public class ArchiveReader {
         + ignoredFiles + " files.");
   }
 
+  @SuppressWarnings("checkstyle:javadocmethod")
   public void haveParsedMicrodescConsensus(String validAfterTime,
       SortedSet<String> microdescriptorDigests) {
     for (String microdescriptor : microdescriptorDigests) {
@@ -107,6 +107,8 @@ public class ArchiveWriter extends Thread {
   private static final String CONSENSUS_MICRODESC = "consensus-microdesc";
   private static final String MICRODESC = "microdesc";
   private static final String MICRODESCS = "microdescs";
+
+  @SuppressWarnings("checkstyle:javadocmethod")
   public static void main(Configuration config) throws ConfigurationException {
 
     logger.info("Starting relay-descriptors module of CollecTor.");
@@ -132,6 +134,7 @@ public class ArchiveWriter extends Thread {
     logger.info("Terminating relay-descriptors module of CollecTor.");
   }
 
+  @SuppressWarnings("checkstyle:javadocmethod")
   public ArchiveWriter(Configuration config) throws ConfigurationException {
     this.config = config;
     storedServerDescriptorsFile =
@@ -142,6 +145,7 @@ public class ArchiveWriter extends Thread {
         new File(config.getPath(Key.StatsPath).toFile(), "stored-microdescriptors");
   }
 
+  @Override
   public void run() {
     try {
       startProcessing();
@@ -315,6 +319,7 @@ public class ArchiveWriter extends Thread {
     }
   }
 
+  @SuppressWarnings("checkstyle:javadocmethod")
   public void intermediateStats(String event) {
     intermediateStats.append("While " + event + ", we stored "
         + this.storedConsensusesCounter + " consensus(es), "
@@ -365,8 +370,6 @@ public class ArchiveWriter extends Thread {
         this.storedConsensuses.entrySet()) {
       long validAfterMillis = c.getKey();
       String validAfterTime = dateTimeFormat.format(validAfterMillis);
-      int allVotes = this.expectedVotes.containsKey(validAfterMillis)
-          ? this.expectedVotes.get(validAfterMillis) : 0;
       int foundVotes = 0;
       if (this.storedVotes.containsKey(validAfterMillis)) {
         foundVotes = this.storedVotes.get(validAfterMillis).size();
@@ -444,6 +447,8 @@ public class ArchiveWriter extends Thread {
         }
       }
       sb.append("\nC, " + validAfterTime);
+      int allVotes = this.expectedVotes.containsKey(validAfterMillis)
+          ? this.expectedVotes.get(validAfterMillis) : 0;
       if (allVotes > 0) {
         sb.append(String.format(", %d/%d V (%.1f%%)", foundVotes, allVotes,
             100.0D * (double) foundVotes / (double) allVotes));
@@ -565,7 +570,7 @@ public class ArchiveWriter extends Thread {
     }
   }
 
-  /* Delete all files from the rsync directory that have not been modified
+  /** Delete all files from the rsync directory that have not been modified
    * in the last three days (except for microdescriptors which are kept
    * for up to thirty days), and remove the .tmp extension from newly
    * written files. */
@@ -644,6 +649,7 @@ public class ArchiveWriter extends Thread {
     }
   }
 
+  @SuppressWarnings("checkstyle:javadocmethod")
   public void storeConsensus(byte[] data, long validAfter,
       SortedSet<String> dirSources,
       SortedSet<String> serverDescriptorDigests) {
@@ -666,6 +672,7 @@ public class ArchiveWriter extends Thread {
     }
   }
 
+  @SuppressWarnings("checkstyle:javadocmethod")
   public void storeMicrodescConsensus(byte[] data, long validAfter,
       SortedSet<String> microdescriptorDigests) {
     SimpleDateFormat yearMonthDirectoryFormat = new SimpleDateFormat(
@@ -692,6 +699,7 @@ public class ArchiveWriter extends Thread {
     }
   }
 
+  @SuppressWarnings("checkstyle:javadocmethod")
   public void storeVote(byte[] data, long validAfter,
       String fingerprint, String digest,
       SortedSet<String> serverDescriptorDigests) {
@@ -719,6 +727,7 @@ public class ArchiveWriter extends Thread {
     }
   }
 
+  @SuppressWarnings("checkstyle:javadocmethod")
   public void storeCertificate(byte[] data, String fingerprint,
       long published) {
     SimpleDateFormat printFormat = new SimpleDateFormat(
@@ -732,6 +741,7 @@ public class ArchiveWriter extends Thread {
     }
   }
 
+  @SuppressWarnings("checkstyle:javadocmethod")
   public void storeServerDescriptor(byte[] data, String digest,
       long published, String extraInfoDigest) {
     SimpleDateFormat printFormat = new SimpleDateFormat("yyyy/MM/");
@@ -760,6 +770,7 @@ public class ArchiveWriter extends Thread {
     }
   }
 
+  @SuppressWarnings("checkstyle:javadocmethod")
   public void storeExtraInfoDescriptor(byte[] data,
       String extraInfoDigest, long published) {
     SimpleDateFormat descriptorFormat = new SimpleDateFormat("yyyy/MM/");
@@ -787,6 +798,7 @@ public class ArchiveWriter extends Thread {
     }
   }
 
+  @SuppressWarnings("checkstyle:javadocmethod")
   public void storeMicrodescriptor(byte[] data,
       String microdescriptorDigest, long validAfter) {
     /* TODO We could check here whether we already stored the
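The two hunks at -365 and -444 move the allVotes computation from the loop head down to the log-building code that actually uses it. For reference, the String.format fragment it feeds produces output like the following (hypothetical counts, default-locale number formatting assumed):

    // Illustrative only: what ", %d/%d V (%.1f%%)" renders to.
    public class VoteStatsFormatSketch {
      public static void main(String[] args) {
        int foundVotes = 8;
        int allVotes = 9;
        System.out.println(String.format(", %d/%d V (%.1f%%)",
            foundVotes, allVotes,
            100.0D * (double) foundVotes / (double) allVotes));
        // Prints: , 8/9 V (88.9%)
      }
    }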
@@ -35,6 +35,8 @@ import java.util.TreeSet;
  * into directory structure in directory-archive/.
  */
 public class CachedRelayDescriptorReader {
+
+  @SuppressWarnings("checkstyle:javadocmethod")
   public CachedRelayDescriptorReader(RelayDescriptorParser rdp,
       String[] inputDirectories, File statsDirectory) {
 
@@ -68,6 +68,7 @@ public class ReferenceChecker {
 
   private static final long THIRTY_DAYS = 30L * ONE_DAY;
 
+  @SuppressWarnings("checkstyle:javadocmethod")
   public ReferenceChecker(File descriptorsDir, File referencesFile,
       File historyFile) {
     this.descriptorsDir = descriptorsDir;
@@ -75,6 +76,7 @@ public class ReferenceChecker {
     this.historyFile = historyFile;
   }
 
+  @SuppressWarnings("checkstyle:javadocmethod")
   public void check() {
     this.getCurrentTimeMillis();
     this.readReferencesFile();
@@ -49,13 +49,14 @@ public class RelayDescriptorDownloader {
   /**
    * Text file containing the descriptors that we are missing and that we
    * want to download. Lines are formatted as:
-   *
-   * - "consensus,<validafter>,<parsed>",
-   * - "consensus-microdesc,<validafter>,<parsed>",
-   * - "vote,<validafter>,<fingerprint>,<parsed>",
-   * - "server,<published>,<relayid>,<descid>,<parsed>",
-   * - "extra,<published>,<relayid>,<descid>,<parsed>", or
-   * - "micro,<validafter>,<relayid>,<descid>,<parsed>".
+   * <p>
+   * - "consensus,&lt;validafter&gt;,&lt;parsed&gt;",
+   * - "consensus-microdesc,&lt;validafter&gt;,&lt;parsed&gt;",
+   * - "vote,&lt;validafter&gt;,&lt;fingerprint&gt;,&lt;parsed&gt;",
+   * - "server,&lt;published&gt;,&lt;relayid&gt;,&lt;descid&gt;,&lt;parsed&gt;",
+   * - "extra,&lt;published&gt;,&lt;relayid&gt;,&lt;descid&gt;,&lt;parsed&gt;", or
+   * - "micro,&lt;validafter&gt;,&lt;relayid&gt;,&lt;descid&gt;,&lt;parsed&gt;".
+   * </p>
    */
   private File missingDescriptorsFile;
 
@@ -69,10 +70,10 @@ public class RelayDescriptorDownloader {
 
   /**
    * Map from base64 microdescriptor digests to keys in missingDescriptors
-   * ("micro,<validafter>,<relayid>,<descid>"). We need this map, because
-   * we can't learn <validafter> or <relayid> from parsing
-   * microdescriptors, but we need to know <validafter> to store
-   * microdescriptors to disk and both <validafter> and <relayid> to
+   * ("micro,&lt;validafter&gt;,&lt;relayid&gt;,&lt;descid&gt;"). We need this map, because
+   * we can't learn &lt;validafter&gt; or &lt;relayid&gt; from parsing
+   * microdescriptors, but we need to know &lt;validafter&gt; to store
+   * microdescriptors to disk and both &lt;validafter&gt; and &lt;relayid&gt; to
    * remove microdescriptors from the missing list. There are potentially
    * many matching keys in missingDescriptors for the same microdescriptor
    * digest. Also, in rare cases relays share the same microdescriptor
@@ -83,7 +84,7 @@ public class RelayDescriptorDownloader {
 
   /**
    * Set of microdescriptor digests that are currently missing. Used for
-   * logging statistics instead of "micro,<validafter>,..." keys which may
+   * logging statistics instead of "micro,&lt;validafter&gt;,..." keys which may
    * contain the same microdescriptor digest multiple times.
    */
   private Set<String> missingMicrodescriptors;
@@ -122,54 +123,54 @@ public class RelayDescriptorDownloader {
   private List<String> authorityFingerprints;
 
   /**
-   * Should we try to download the current consensus if we don't have it?
+   * Try to download the current consensus if we don't have it.
    */
   private boolean downloadCurrentConsensus;
 
   /**
-   * Should we try to download the current microdesc consensus if we don't
-   * have it?
+   * Try to download the current microdesc consensus if we don't
+   * have it.
    */
   private boolean downloadCurrentMicrodescConsensus;
 
   /**
-   * Should we try to download current votes if we don't have them?
+   * Try to download current votes if we don't have them.
   */
   private boolean downloadCurrentVotes;
 
   /**
-   * Should we try to download missing server descriptors that have been
-   * published within the past 24 hours?
+   * Try to download missing server descriptors that have been
+   * published within the past 24 hours.
    */
   private boolean downloadMissingServerDescriptors;
 
   /**
-   * Should we try to download missing extra-info descriptors that have
-   * been published within the past 24 hours?
+   * Try to download missing extra-info descriptors that have
+   * been published within the past 24 hours.
    */
   private boolean downloadMissingExtraInfos;
 
   /**
-   * Should we try to download missing microdescriptors that have been
-   * published within the past 24 hours?
+   * Try to download missing microdescriptors that have been
+   * published within the past 24 hours.
   */
   private boolean downloadMissingMicrodescriptors;
 
   /**
-   * Should we try to download all server descriptors from the authorities
-   * once every 24 hours?
+   * Try to download all server descriptors from the authorities
+   * once every 24 hours.
    */
   private boolean downloadAllServerDescriptors;
 
   /**
-   * Should we try to download all extra-info descriptors from the
-   * authorities once every 24 hours?
+   * Try to download all extra-info descriptors from the
+   * authorities once every 24 hours.
    */
   private boolean downloadAllExtraInfos;
 
   /**
-   * Should we download zlib-compressed versions of descriptors by adding
-   * ".z" to URLs?
+   * Download zlib-compressed versions of descriptors by adding
+   * ".z" to URLs.
    */
   private boolean downloadCompressed;
 
@@ -475,7 +476,7 @@ public class RelayDescriptorDownloader {
    * We have parsed a consensus. Take this consensus off the missing list
    * and add the votes created by the given <code>authorities</code> and
    * the <code>serverDescriptors</code> which are in the format
-   * "<published>,<relayid>,<descid>" to that list.
+   * "&lt;published&gt;,&lt;relayid&gt;,&lt;descid&gt;" to that list.
    */
   public void haveParsedConsensus(String validAfter,
       Set<String> authorities, Set<String> serverDescriptors) {
@@ -512,7 +513,7 @@ public class RelayDescriptorDownloader {
   /**
    * We have parsed a microdesc consensus. Take this microdesc consensus
    * off the missing list and add the <code>microdescriptors</code> which
-   * are in the format "<validafter>,<relayid>,<descid>" to that
+   * are in the format "&lt;validafter&gt;,&lt;relayid&gt;,&lt;descid&gt;" to that
    * list.
    */
   public void haveParsedMicrodescConsensus(String validAfter,
@@ -571,7 +572,7 @@ public class RelayDescriptorDownloader {
   /**
    * We have parsed a vote. Take this vote off the missing list and add
    * the <code>serverDescriptors</code> which are in the format
-   * "<published>,<relayid>,<descid>" to that list.
+   * "&lt;published&gt;,&lt;relayid&gt;,&lt;descid&gt;" to that list.
    */
   public void haveParsedVote(String validAfter, String fingerprint,
       Set<String> serverDescriptors) {
@@ -865,8 +866,8 @@ public class RelayDescriptorDownloader {
       String fullUrl = "http://" + authority + resource
           + (this.downloadCompressed && !resource.startsWith("/tor/extra/")
           ? ".z" : "");
-      URL u = new URL(fullUrl);
-      HttpURLConnection huc = (HttpURLConnection) u.openConnection();
+      URL url = new URL(fullUrl);
+      HttpURLConnection huc = (HttpURLConnection) url.openConnection();
       huc.setRequestMethod("GET");
       huc.connect();
       int response = huc.getResponseCode();
@@ -1038,7 +1039,6 @@ public class RelayDescriptorDownloader {
       this.logger.warn("Failed writing "
           + this.missingDescriptorsFile.getAbsolutePath() + "!", e);
     }
-    int missingMicrodescriptors = this.missingMicrodescriptors.size();
 
     /* Write text file containing the directory authorities and when we
      * last downloaded all server and extra-info descriptors from them to
@@ -1124,7 +1124,8 @@ public class RelayDescriptorDownloader {
         + missingMicrodescConsensuses + " microdesc consensus(es), "
         + missingVotes + " vote(s), " + missingServerDescriptors
         + " server descriptor(s), " + missingExtraInfoDescriptors
-        + " extra-info descriptor(s), and " + missingMicrodescriptors
+        + " extra-info descriptor(s), and "
+        + this.missingMicrodescriptors.size()
         + " microdescriptor(s), some of which we may try in the next "
         + "execution.");
   }
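The reworked Javadoc above fixes the documentation of the missing-descriptors file format rather than the format itself. A hedged sketch of consuming one such line; the example line and the treatment of the trailing parsed field are assumptions based solely on that Javadoc:

    // Illustrative sketch; field semantics taken from the Javadoc above.
    public class MissingDescriptorsLineSketch {
      public static void main(String[] args) {
        // Hypothetical "vote,<validafter>,<fingerprint>,<parsed>" line.
        String line = "vote,2016-05-31 12:00:00,"
            + "0123456789ABCDEF0123456789ABCDEF01234567,NA";
        String[] parts = line.split(",");
        String type = parts[0];                   // consensus, vote, server, ...
        String timestamp = parts[1];              // <validafter> or <published>
        String parsed = parts[parts.length - 1];  // <parsed>; "NA" is assumed
                                                  // here to mean not yet parsed
        System.out.println(type + " " + timestamp + " parsed=" + parsed);
      }
    }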
@@ -70,6 +70,7 @@ public class RelayDescriptorParser {
     this.ar = ar;
   }
 
+  @SuppressWarnings("checkstyle:javadocmethod")
   public boolean parse(byte[] data) {
     boolean stored = false;
     try {
@@ -325,6 +326,7 @@ public class RelayDescriptorParser {
     return stored;
   }
 
+  @SuppressWarnings("checkstyle:javadocmethod")
   public void storeMicrodescriptor(byte[] data, String digest256Hex,
       String digest256Base64, long validAfter) {
     if (this.aw != null) {
@@ -36,6 +36,7 @@ import java.util.TreeMap;
 public class TorperfDownloader extends Thread {
 
   private static Logger logger = LoggerFactory.getLogger(TorperfDownloader.class);
 
+  @SuppressWarnings("checkstyle:javadocmethod")
   public static void main(Configuration config) throws ConfigurationException {
     logger.info("Starting torperf module of CollecTor.");
 
@@ -63,6 +64,7 @@ public class TorperfDownloader extends Thread {
   private String[] torperfFilesLines = null;
   private SimpleDateFormat dateFormat;
 
+  @Override
   public void run() {
     try {
       startProcessing();
@@ -83,10 +85,10 @@ public class TorperfDownloader extends Thread {
     this.dateFormat.setTimeZone(TimeZone.getTimeZone("UTC"));
     this.readLastMergedTimestamps();
     for (String[] source : config.getStringArrayArray(Key.TorperfSources)) {
-        torperfSources.put(source[0], source[1]);
+      torperfSources.put(source[0], source[1]);
     }
     for (String torperfFilesLine : this.torperfFilesLines) {
-        this.downloadAndMergeFiles(torperfFilesLine);
+      this.downloadAndMergeFiles(torperfFilesLine);
     }
     this.writeLastMergedTimestamps();
 
@@ -211,7 +213,7 @@ public class TorperfDownloader extends Thread {
     }
   }
 
-  private boolean downloadAndAppendFile(String url, File outputFile,
+  private boolean downloadAndAppendFile(String urlString, File outputFile,
       boolean isDataFile) {
 
     /* Read an existing output file to determine which line will be the
@@ -241,10 +243,10 @@ public class TorperfDownloader extends Thread {
     }
     try {
       this.logger.debug("Downloading " + (isDataFile ? ".data" :
-          ".extradata") + " file from '" + url + "' and merging it into "
-          + "'" + outputFile.getAbsolutePath() + "'.");
-      URL u = new URL(url);
-      HttpURLConnection huc = (HttpURLConnection) u.openConnection();
+          ".extradata") + " file from '" + urlString + "' and merging it "
+          + "into '" + outputFile.getAbsolutePath() + "'.");
+      URL url = new URL(urlString);
+      HttpURLConnection huc = (HttpURLConnection) url.openConnection();
       huc.setRequestMethod("GET");
       huc.connect();
       BufferedReader br = new BufferedReader(new InputStreamReader(
@@ -276,7 +278,7 @@ public class TorperfDownloader extends Thread {
       }
     } catch (IOException e) {
       this.logger.warn("Failed downloading and/or merging '"
-          + url + "'.", e);
+          + urlString + "'.", e);
       return false;
     }
     if (lastTimestampLine == null) {
@@ -320,8 +322,8 @@ public class TorperfDownloader extends Thread {
     BufferedReader brE = new BufferedReader(new FileReader(extradataFile));
     String lineD = brD.readLine();
     String lineE = brE.readLine();
-    int d = 1;
-    int e = 1;
+    int skippedLineCount = 1;
+    int skippedExtraDataCount = 1;
     String maxDataComplete = null;
     String maxUsedAt = null;
     while (lineD != null) {
@@ -331,14 +333,14 @@ public class TorperfDownloader extends Thread {
        * file or without it. */
       if (lineD.isEmpty()) {
         this.logger.trace("Skipping empty line " + dataFile.getName()
-            + ":" + d++ + ".");
+            + ":" + skippedLineCount++ + ".");
         lineD = brD.readLine();
         continue;
       }
       SortedMap<String, String> data = this.parseDataLine(lineD);
       if (data == null) {
         this.logger.trace("Skipping illegal line " + dataFile.getName()
-            + ":" + d++ + " '" + lineD + "'.");
+            + ":" + skippedLineCount++ + " '" + lineD + "'.");
         lineD = brD.readLine();
         continue;
       }
@@ -346,7 +348,7 @@ public class TorperfDownloader extends Thread {
       double dataCompleteSeconds = Double.parseDouble(dataComplete);
       if (skipUntil != null && dataComplete.compareTo(skipUntil) < 0) {
         this.logger.trace("Skipping " + dataFile.getName() + ":"
-            + d++ + " which we already processed before.");
+            + skippedLineCount++ + " which we already processed before.");
         lineD = brD.readLine();
         continue;
       }
@@ -358,33 +360,35 @@ public class TorperfDownloader extends Thread {
       while (lineE != null) {
         if (lineE.isEmpty()) {
           this.logger.trace("Skipping " + extradataFile.getName() + ":"
-              + e++ + " which is empty.");
+              + skippedExtraDataCount++ + " which is empty.");
           lineE = brE.readLine();
           continue;
         }
         if (lineE.startsWith("BUILDTIMEOUT_SET ")) {
           this.logger.trace("Skipping " + extradataFile.getName() + ":"
-              + e++ + " which is a BUILDTIMEOUT_SET line.");
+              + skippedExtraDataCount++ + " which is a BUILDTIMEOUT_SET "
+              + "line.");
           lineE = brE.readLine();
           continue;
         } else if (lineE.startsWith("ok ")
             || lineE.startsWith("error ")) {
           this.logger.trace("Skipping " + extradataFile.getName() + ":"
-              + e++ + " which is in the old format.");
+              + skippedExtraDataCount++ + " which is in the old format.");
           lineE = brE.readLine();
           continue;
         }
         extradata = this.parseExtradataLine(lineE);
         if (extradata == null) {
           this.logger.trace("Skipping Illegal line "
-              + extradataFile.getName() + ":" + e++ + " '" + lineE
-              + "'.");
+              + extradataFile.getName() + ":" + skippedExtraDataCount++
+              + " '" + lineE + "'.");
           lineE = brE.readLine();
           continue;
         }
         if (!extradata.containsKey("USED_AT")) {
           this.logger.trace("Skipping " + extradataFile.getName() + ":"
-              + e++ + " which doesn't contain a USED_AT element.");
+              + skippedExtraDataCount++ + " which doesn't contain a "
+              + "USED_AT element.");
           lineE = brE.readLine();
           continue;
         }
@@ -392,14 +396,15 @@ public class TorperfDownloader extends Thread {
         double usedAtSeconds = Double.parseDouble(usedAt);
         if (skipUntil != null && usedAt.compareTo(skipUntil) < 0) {
           this.logger.trace("Skipping " + extradataFile.getName() + ":"
-              + e++ + " which we already processed before.");
+              + skippedExtraDataCount++ + " which we already processed "
+              + "before.");
           lineE = brE.readLine();
           continue;
         }
         maxUsedAt = usedAt;
         if (Math.abs(usedAtSeconds - dataCompleteSeconds) <= 1.0) {
           this.logger.debug("Merging " + extradataFile.getName() + ":"
-              + e++ + " into the current .data line.");
+              + skippedExtraDataCount++ + " into the current .data line.");
           lineE = brE.readLine();
           break;
         } else if (usedAtSeconds > dataCompleteSeconds) {
@@ -409,8 +414,9 @@ public class TorperfDownloader extends Thread {
           break;
         } else {
           this.logger.trace("Skipping " + extradataFile.getName() + ":"
-              + e++ + " which is too old to be merged with "
-              + dataFile.getName() + ":" + d + ".");
+              + skippedExtraDataCount++ + " which is too old to be "
+              + "merged with " + dataFile.getName() + ":"
+              + skippedLineCount + ".");
           lineE = brE.readLine();
           continue;
         }
@@ -424,14 +430,15 @@ public class TorperfDownloader extends Thread {
       }
       keysAndValues.putAll(data);
       keysAndValues.putAll(config);
-      this.logger.debug("Writing " + dataFile.getName() + ":" + d++ + ".");
+      this.logger.debug("Writing " + dataFile.getName() + ":"
+          + skippedLineCount++ + ".");
       lineD = brD.readLine();
       try {
         this.writeTpfLine(source, fileSize, keysAndValues);
       } catch (IOException ex) {
         this.logger.warn("Error writing output line. "
             + "Aborting to merge " + dataFile.getName() + " and "
-            + extradataFile.getName() + ".", e);
+            + extradataFile.getName() + ".", skippedExtraDataCount);
         break;
       }
     }
@@ -480,11 +487,11 @@ public class TorperfDownloader extends Thread {
     SortedMap<String, String> data = new TreeMap<String, String>();
     try {
       for (Map.Entry<Integer, String> e : this.dataTimestamps.entrySet()) {
-        int i = e.getKey();
-        if (parts.length > i + 1) {
+        int intKey = e.getKey();
+        if (parts.length > intKey + 1) {
           String key = e.getValue();
-          String value = String.format("%s.%02d", parts[i],
-              Integer.parseInt(parts[i + 1]) / 10000);
+          String value = String.format("%s.%02d", parts[intKey],
+              Integer.parseInt(parts[intKey + 1]) / 10000);
           data.put(key, value);
         }
       }
@@ -623,7 +630,7 @@ public class TorperfDownloader extends Thread {
     this.cachedTpfLines = null;
   }
 
-  /* Delete all files from the rsync directory that have not been modified
+  /** Delete all files from the rsync directory that have not been modified
    * in the last three days. */
   public void cleanUpRsyncDirectory() {
     long cutOffMillis = System.currentTimeMillis()
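Beyond the counter renames, the merge loop patched above implements one rule worth stating plainly: a .data line and an .extradata line describe the same measurement when their DATACOMPLETE and USED_AT timestamps differ by at most one second; if the extradata timestamp is newer, the .data side advances, otherwise the extradata line is skipped as too old. A sketch of just that decision, with simplified names:

    // Illustrative sketch of the pairing rule in the merge loop.
    public class TorperfMergeRuleSketch {

      enum Action { MERGE, SKIP_EXTRADATA, ADVANCE_DATA }

      static Action decide(double dataCompleteSeconds, double usedAtSeconds) {
        if (Math.abs(usedAtSeconds - dataCompleteSeconds) <= 1.0) {
          return Action.MERGE;           // same measurement, merge the lines
        } else if (usedAtSeconds > dataCompleteSeconds) {
          return Action.ADVANCE_DATA;    // extradata is newer, move .data on
        } else {
          return Action.SKIP_EXTRADATA;  // extradata too old, read next line
        }
      }

      public static void main(String[] args) {
        System.out.println(decide(1464696000.25, 1464696000.75)); // MERGE
        System.out.println(decide(1464696000.25, 1464696900.0));  // ADVANCE_DATA
      }
    }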
@@ -84,7 +84,7 @@ BridgeSnapshotsDirectory = in/bridge-descriptors/
 ## Replace IP addresses in sanitized bridge descriptors with 10.x.y.z
 ## where x.y.z = H(IP address | bridge identity | secret)[:3], so that we
 ## can learn about IP address changes.
-ReplaceIPAddressesWithHashes = false
+ReplaceIpAddressesWithHashes = false
 #
 ## Limit internal bridge descriptor mapping state to the following number
 ## of days, or inf for unlimited.
@@ -63,10 +63,10 @@ public class ConfigurationTest {
     Configuration conf = new Configuration();
     conf.load(new ByteArrayInputStream(("CompressRelayDescriptorDownloads=false"
         + "\nImportDirectoryArchives = trUe"
-        + "\nReplaceIPAddressesWithHashes= false").getBytes()));
+        + "\nReplaceIpAddressesWithHashes= false").getBytes()));
     assertFalse(conf.getBool(Key.CompressRelayDescriptorDownloads));
     assertTrue(conf.getBool(Key.ImportDirectoryArchives));
-    assertFalse(conf.getBool(Key.ReplaceIPAddressesWithHashes));
+    assertFalse(conf.getBool(Key.ReplaceIpAddressesWithHashes));
   }
 
   @Test()