Replace System.{out|err} calls with proper logging.

This commit is contained in:
Karsten Loesing 2018-08-20 21:51:29 +02:00
parent ab164e7a4b
commit 344f7a5da0
12 changed files with 160 additions and 106 deletions

View File

@ -12,6 +12,9 @@ import org.torproject.descriptor.ExtraInfoDescriptor;
import org.torproject.descriptor.NetworkStatusEntry;
import org.torproject.descriptor.RelayNetworkStatusConsensus;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileWriter;
@ -25,6 +28,8 @@ import java.util.TreeMap;
public class Main {
private static Logger log = LoggerFactory.getLogger(Main.class);
/** Executes this data-processing module. */
public static void main(String[] args) throws Exception {
parseArgs(args);
@ -46,8 +51,8 @@ public class Main {
writeToSingleFile = false;
byStatsDateNotByDescHour = false;
} else {
System.err.println("Usage: java " + Main.class.getName()
+ " [ --stats-date | --desc-hour ]");
log.warn("Usage: java {} [ --stats-date | --desc-hour ]",
Main.class.getName());
System.exit(1);
}
}

View File

@ -8,6 +8,9 @@ import org.torproject.descriptor.DescriptorReader;
import org.torproject.descriptor.DescriptorSourceFactory;
import org.torproject.descriptor.ExtraInfoDescriptor;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.File;
@ -30,6 +33,8 @@ import java.util.TreeSet;
public class Main {
private static Logger log = LoggerFactory.getLogger(Main.class);
static class RawStat implements Comparable<RawStat> {
/* Date when the statistics interval ended in days since the epoch. */
@ -73,13 +78,13 @@ public class Main {
return new RawStat(dateDays, fingerprint, fractionRead,
fractionWrite, fractionBoth);
} else {
System.err.println("Could not deserialize raw statistic from "
+ "string '" + string + "'.");
log.warn("Could not deserialize raw statistic from string '{}'.",
string);
return null;
}
} catch (NumberFormatException e) {
System.err.println("Could not deserialize raw statistic from "
+ "string '" + string + "'.");
log.warn("Could not deserialize raw statistic from string '{}'.",
string, e);
return null;
}
}
@ -133,14 +138,13 @@ public class Main {
SortedMap<String, Long> parseHistory = parseParseHistory(
readStringFromFile(parseHistoryFile));
if (parseHistory == null) {
System.err.println("Could not parse "
+ parseHistoryFile.getAbsolutePath() + ". Proceeding without "
+ "parse history.");
log.warn("Could not parse {}. Proceeding without parse history.",
parseHistoryFile.getAbsolutePath());
}
SortedMap<String, Short> aggregateStats = parseAggregateStats(
readStringFromFile(aggregateStatsFile));
if (aggregateStats == null) {
System.err.println("Could not parse previously aggregated "
log.warn("Could not parse previously aggregated "
+ "statistics. Not proceeding, because we would otherwise "
+ "lose previously aggregated values for which we don't have "
+ "raw statistics anymore.");
@ -150,7 +154,7 @@ public class Main {
parseHistory = addRawStatsFromDescriptors(newRawStats,
descriptorsDirectories, parseHistory);
if (parseHistory == null) {
System.err.println("Could not parse raw statistics from "
log.warn("Could not parse raw statistics from "
+ "descriptors. Not proceeding, because we would otherwise "
+ "leave out those descriptors in future runs.");
return;
@ -159,7 +163,7 @@ public class Main {
SortedSet<RawStat> rawStats = parseRawStats(
readStringFromFile(rawStatsFile));
if (rawStats == null) {
System.err.println("Could not parse previously parsed raw "
log.warn("Could not parse previously parsed raw "
+ "statistics. Not proceeding, because we might otherwise "
+ "leave out previously parsed statistics in the aggregates.");
return;
@ -167,7 +171,8 @@ public class Main {
SortedSet<Long> conflictingDates = mergeRawStats(rawStats,
newRawStats);
if (!conflictingDates.isEmpty()) {
System.err.print("Could not update aggregate statistics, because "
StringBuilder sb = new StringBuilder(
"Could not update aggregate statistics, because "
+ "we already aggregated statistics for at least one contained "
+ "date and discarded the underlying raw statistics. Not "
+ "proceeding. To fix this, you'll have to re-import "
@ -175,10 +180,10 @@ public class Main {
DateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd");
dateFormat.setTimeZone(TimeZone.getTimeZone("UTC"));
for (long conflictingDate : conflictingDates) {
System.err.print(" " + dateFormat.format(conflictingDate
* ONE_DAY_IN_MILLIS));
sb.append("\n "
+ dateFormat.format(conflictingDate * ONE_DAY_IN_MILLIS));
}
System.err.println();
log.warn(sb.toString());
return;
}
updateAggregateStats(aggregateStats, rawStats);
@ -237,20 +242,19 @@ public class Main {
while ((line = lnr.readLine()) != null) {
String[] parts = line.split(",");
if (parts.length < 2) {
System.err.println("Invalid line " + lnr.getLineNumber()
+ " in parse history: '" + line + "'.");
log.warn("Invalid line {} in parse history: '{}'.",
lnr.getLineNumber(), line);
return null;
}
parsedParseHistory.put(parts[0], Long.parseLong(parts[1]));
}
} catch (IOException e) {
System.err.println("Unexpected I/O exception while reading line "
+ lnr.getLineNumber() + " from parse history.");
e.printStackTrace();
log.warn("Unexpected I/O exception while reading line {} from parse "
+ "history.", lnr.getLineNumber(), e);
return null;
} catch (NumberFormatException e) {
System.err.println("Invalid line " + lnr.getLineNumber()
+ " in parse history: '" + line + "'.");
log.warn("Invalid line {} in parse history: '{}'.", lnr.getLineNumber(),
line, e);
return null;
}
return parsedParseHistory;
@ -285,28 +289,27 @@ public class Main {
String line = "";
try {
if (!AGGREGATE_STATS_HEADER.equals(lnr.readLine())) {
System.err.println("First line of aggregate statistics does not "
+ "contain the header line. Is this the correct file?");
log.warn("First line of aggregate statistics does not "
+ "contain the header line. Is this the correct file?");
return null;
}
while ((line = lnr.readLine()) != null) {
String[] parts = line.split(",");
if (parts.length != 4) {
System.err.println("Invalid line " + lnr.getLineNumber()
+ " in aggregate statistics: '" + line + "'.");
log.warn("Invalid line {} in aggregate statistics: '{}'.",
lnr.getLineNumber(), line);
return null;
}
parsedAggregateStats.put(parts[0] + "," + parts[1] + ","
+ parts[2], Short.parseShort(parts[3]));
}
} catch (IOException e) {
System.err.println("Unexpected I/O exception while reading line "
+ lnr.getLineNumber() + " from aggregate statistics.");
e.printStackTrace();
log.warn("Unexpected I/O exception while reading line {} from aggregate "
+ "statistics.", lnr.getLineNumber(), e);
return null;
} catch (NumberFormatException e) {
System.err.println("Invalid line " + lnr.getLineNumber()
+ " in aggregate statistics: '" + line + "'.");
log.warn("Invalid line {} in aggregate statistics: '{}'.",
lnr.getLineNumber(), line, e);
return null;
}
return parsedAggregateStats;
@ -332,20 +335,19 @@ public class Main {
while ((line = lnr.readLine()) != null) {
RawStat rawStat = RawStat.fromString(line);
if (rawStat == null) {
System.err.println("Invalid line " + lnr.getLineNumber()
+ " in raw statistics: '" + line + "'.");
log.warn("Invalid line {} in raw statistics: '{}'.",
lnr.getLineNumber(), line);
return null;
}
parsedRawStats.add(rawStat);
}
} catch (IOException e) {
System.err.println("Unexpected I/O exception while reading line "
+ lnr.getLineNumber() + " from raw statistics.");
e.printStackTrace();
log.warn("Unexpected I/O exception while reading line {} from raw "
+ "statistics.", lnr.getLineNumber(), e);
return null;
} catch (NumberFormatException e) {
System.err.println("Invalid line " + lnr.getLineNumber()
+ " in raw statistics: '" + line + "'.");
log.warn("Invalid line {} in raw statistics: '{}'.", lnr.getLineNumber(),
line, e);
return null;
}
return parsedRawStats;
@ -384,8 +386,8 @@ public class Main {
int write = extraInfo.getConnBiDirectWrite();
int both = extraInfo.getConnBiDirectBoth();
if (below < 0 || read < 0 || write < 0 || both < 0) {
System.err.println("Could not parse incomplete conn-bi-direct "
+ "statistics. Skipping descriptor.");
log.debug("Could not parse incomplete conn-bi-direct statistics. "
+ "Skipping descriptor.");
return null;
}
long statsEndMillis = extraInfo.getConnBiDirectStatsEndMillis();

View File

@ -3,6 +3,9 @@
package org.torproject.metrics.stats.hidserv;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileWriter;
@ -21,6 +24,8 @@ import java.util.TreeMap;
* statistics and the total network fraction of reporting relays. */
public class Aggregator {
private static Logger log = LoggerFactory.getLogger(Aggregator.class);
/** Document file containing extrapolated hidden-service statistics. */
private File extrapolatedHidServStatsFile;
@ -57,8 +62,8 @@ public class Aggregator {
this.extrapolatedHidServStatsStore.retrieve(
this.extrapolatedHidServStatsFile);
if (extrapolatedStats == null) {
System.err.printf("Unable to retrieve extrapolated hidden-service "
+ "statistics from file %s. Skipping aggregation step.%n",
log.warn("Unable to retrieve extrapolated hidden-service "
+ "statistics from file {}. Skipping aggregation step.",
this.extrapolatedHidServStatsFile.getAbsolutePath());
return;
}
@ -187,7 +192,7 @@ public class Aggregator {
this.hidservStatsCsvFile))) {
bw.write(sb.toString());
} catch (IOException e) {
System.err.printf("Unable to write results to %s. Ignoring.",
log.warn("Unable to write results to {}. Ignoring.",
this.extrapolatedHidServStatsFile.getAbsolutePath());
}
}

View File

@ -3,6 +3,9 @@
package org.torproject.metrics.stats.hidserv;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
@ -12,6 +15,9 @@ import java.util.Map;
* status entries and bandwidth weights in a network status consensus. */
public class ComputedNetworkFractions implements Document {
private static Logger log
= LoggerFactory.getLogger(ComputedNetworkFractions.class);
/** Relay fingerprint consisting of 40 upper-case hex characters. */
private String fingerprint;
@ -131,21 +137,18 @@ public class ComputedNetworkFractions implements Document {
@Override
public boolean parse(String[] formattedStrings) {
if (formattedStrings.length != 2) {
System.err.printf("Invalid number of formatted strings. "
+ "Skipping.%n");
log.warn("Invalid number of formatted strings. Skipping.");
return false;
}
String[] firstParts = formattedStrings[0].split(",", 2);
if (firstParts.length != 2) {
System.err.printf("Invalid number of comma-separated values. "
+ "Skipping.%n");
log.warn("Invalid number of comma-separated values. Skipping.");
return false;
}
String fingerprint = firstParts[0];
String[] secondParts = formattedStrings[1].split(",", 3);
if (secondParts.length != 3) {
System.err.printf("Invalid number of comma-separated values. "
+ "Skipping.%n");
log.warn("Invalid number of comma-separated values. Skipping.");
return false;
}
String validAfterDate = firstParts[1];
@ -163,7 +166,7 @@ public class ComputedNetworkFractions implements Document {
if (validAfterDateMillis == DateTimeHelper.NO_TIME_AVAILABLE
|| validAfterTimeMillis < 0L
|| validAfterTimeMillis >= DateTimeHelper.ONE_DAY) {
System.err.printf("Invalid date/hour format. Skipping.%n");
log.warn("Invalid date/hour format. Skipping.");
return false;
}
long validAfterMillis = validAfterDateMillis + validAfterTimeMillis;
@ -176,7 +179,7 @@ public class ComputedNetworkFractions implements Document {
? 0.0 : Double.parseDouble(secondParts[2]);
return true;
} catch (NumberFormatException e) {
System.err.printf("Invalid number format. Skipping.%n");
log.warn("Invalid number format. Skipping.");
return false;
}
}

View File

@ -3,6 +3,9 @@
package org.torproject.metrics.stats.hidserv;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.File;
@ -22,6 +25,8 @@ import java.util.TreeSet;
* interface to a file and later to retrieve them. */
public class DocumentStore<T extends Document> {
private static Logger log = LoggerFactory.getLogger(DocumentStore.class);
/** Document class, needed to create new instances when retrieving
* documents. */
private Class<T> clazz;
@ -41,8 +46,8 @@ public class DocumentStore<T extends Document> {
/* Retrieve existing documents. */
Set<T> retrievedDocuments = this.retrieve(documentFile);
if (retrievedDocuments == null) {
System.err.printf("Unable to read and update %s. Not storing "
+ "documents.%n", documentFile.getAbsoluteFile());
log.warn("Unable to read and update {}. Not storing documents.",
documentFile.getAbsoluteFile());
return false;
}
@ -64,9 +69,9 @@ public class DocumentStore<T extends Document> {
File documentTempFile = new File(documentFile.getAbsoluteFile()
+ ".tmp");
if (documentTempFile.exists()) {
System.err.printf("Temporary document file %s still exists, "
log.warn("Temporary document file {} still exists, "
+ "indicating that a previous execution did not terminate "
+ "cleanly. Not storing documents.%n",
+ "cleanly. Not storing documents.",
documentTempFile.getAbsoluteFile());
return false;
}
@ -86,8 +91,8 @@ public class DocumentStore<T extends Document> {
documentFile.delete();
documentTempFile.renameTo(documentFile);
} catch (IOException e) {
System.err.printf("Unable to write %s. Not storing documents.%n",
documentFile.getAbsolutePath());
log.warn("Unable to write {}. Not storing documents.",
documentFile.getAbsolutePath(), e);
return false;
}
@ -121,9 +126,9 @@ public class DocumentStore<T extends Document> {
if (!line.startsWith(" ")) {
formattedString0 = line;
} else if (formattedString0 == null) {
System.err.printf("First line in %s must not start with a "
+ "space. Not retrieving any previously stored "
+ "documents.%n", documentFile.getAbsolutePath());
log.warn("First line in {} must not start with a space. Not "
+ "retrieving any previously stored documents.",
documentFile.getAbsolutePath());
return null;
} else if (prefix.length() > formattedString0.length()
&& !(formattedString0 + line.substring(1))
@ -138,24 +143,21 @@ public class DocumentStore<T extends Document> {
T document = this.clazz.newInstance();
if (!document.parse(new String[] { formattedString0,
line.substring(1) })) {
System.err.printf("Unable to read line %d from %s. Not "
+ "retrieving any previously stored documents.%n",
lnr.getLineNumber(), documentFile.getAbsolutePath());
log.warn("Unable to read line {} from {}. Not retrieving any "
+ "previously stored documents.", lnr.getLineNumber(),
documentFile.getAbsolutePath());
return null;
}
result.add(document);
}
}
} catch (IOException e) {
System.err.printf("Unable to read %s. Not retrieving any "
+ "previously stored documents.%n",
documentFile.getAbsolutePath());
e.printStackTrace();
log.warn("Unable to read {}. Not retrieving any previously stored "
+ "documents.", documentFile.getAbsolutePath(), e);
return null;
} catch (InstantiationException | IllegalAccessException e) {
System.err.printf("Unable to read %s. Cannot instantiate document "
+ "object.%n", documentFile.getAbsolutePath());
e.printStackTrace();
log.warn("Unable to read {}. Cannot instantiate document object.",
documentFile.getAbsolutePath(), e);
return null;
}
return result;

View File

@ -3,11 +3,17 @@
package org.torproject.metrics.stats.hidserv;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/** Extrapolated network totals of hidden-service statistics reported by a
* single relay. Extrapolated values are based on reported statistics and
* computed network fractions in the statistics interval. */
public class ExtrapolatedHidServStats implements Document {
private static Logger log
= LoggerFactory.getLogger(ExtrapolatedHidServStats.class);
/** Date of statistics interval end in milliseconds. */
private long statsDateMillis;
@ -130,15 +136,16 @@ public class ExtrapolatedHidServStats implements Document {
@Override
public boolean parse(String[] formattedStrings) {
if (formattedStrings.length != 2) {
System.err.printf("Invalid number of formatted strings. Skipping.%n");
log.warn("Invalid number of formatted strings: {}. Skipping.",
formattedStrings.length);
return false;
}
long statsDateMillis = DateTimeHelper.parse(formattedStrings[0],
DateTimeHelper.ISO_DATE_FORMAT);
String[] secondParts = formattedStrings[1].split(",", 5);
if (secondParts.length != 5) {
System.err.printf("Invalid number of comma-separated values. "
+ "Skipping.%n");
log.warn("Invalid number of comma-separated values: {}. Skipping.",
secondParts.length);
return false;
}
String fingerprint = secondParts[0];

View File

@ -3,6 +3,9 @@
package org.torproject.metrics.stats.hidserv;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.File;
import java.util.HashSet;
import java.util.Map;
@ -17,6 +20,8 @@ import java.util.TreeSet;
* observed by the relay. */
public class Extrapolator {
private static Logger log = LoggerFactory.getLogger(Extrapolator.class);
/** Document file containing previously parsed reported hidden-service
* statistics. */
private File reportedHidServStatsFile;
@ -84,8 +89,8 @@ public class Extrapolator {
/* Make sure that all documents could be retrieved correctly. */
if (extrapolatedStats == null || reportedStats == null) {
System.err.printf("Could not read previously parsed or "
+ "extrapolated hidserv-stats. Skipping.");
log.warn("Could not read previously parsed or extrapolated "
+ "hidserv-stats. Skipping.");
return false;
}

View File

@ -3,6 +3,9 @@
package org.torproject.metrics.stats.hidserv;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.File;
/** Main class for updating extrapolated network totals of hidden-service
@ -11,6 +14,8 @@ import java.io.File;
* do not overlap. */
public class Main {
private static Logger log = LoggerFactory.getLogger(Main.class);
/** Parses new descriptors, extrapolate contained statistics using
* computed network fractions, aggregate results, and writes results to
* disk. */
@ -24,8 +29,7 @@ public class Main {
/* Initialize parser and read parse history to avoid parsing
* descriptor files that haven't changed since the last execution. */
System.out.println("Initializing parser and reading parse "
+ "history...");
log.info("Initializing parser and reading parse history...");
DocumentStore<ReportedHidServStats> reportedHidServStatsStore =
new DocumentStore<>(ReportedHidServStats.class);
DocumentStore<ComputedNetworkFractions>
@ -37,29 +41,28 @@ public class Main {
/* Parse new descriptors and store their contents using the document
* stores. */
System.out.println("Parsing descriptors...");
log.info("Parsing descriptors...");
parser.parseDescriptors();
/* Write the parse history to avoid parsing descriptor files again
* next time. It's okay to do this now and not at the end of the
* execution, because even if something breaks apart below, it's safe
* not to parse descriptor files again. */
System.out.println("Writing parse history...");
log.info("Writing parse history...");
parser.writeParseHistory();
/* Extrapolate reported statistics using computed network fractions
* and write the result to disk using a document store. The result is
* a single file with extrapolated network totals based on reports by
* single relays. */
System.out.println("Extrapolating statistics...");
log.info("Extrapolating statistics...");
DocumentStore<ExtrapolatedHidServStats> extrapolatedHidServStatsStore
= new DocumentStore<>(ExtrapolatedHidServStats.class);
Extrapolator extrapolator = new Extrapolator(statusDirectory,
reportedHidServStatsStore, computedNetworkFractionsStore,
extrapolatedHidServStatsStore);
if (!extrapolator.extrapolateHidServStats()) {
System.err.println("Could not extrapolate statistics. "
+ "Terminating.");
log.warn("Could not extrapolate statistics. Terminating.");
return;
}
@ -67,14 +70,14 @@ public class Main {
* This includes calculating daily weighted interquartile means, among
* other statistics. Write the result to a .csv file that can be
* processed by other tools. */
System.out.println("Aggregating statistics...");
log.info("Aggregating statistics...");
File hidservStatsExtrapolatedCsvFile = new File("stats/hidserv.csv");
Aggregator aggregator = new Aggregator(statusDirectory,
extrapolatedHidServStatsStore, hidservStatsExtrapolatedCsvFile);
aggregator.aggregateHidServStats();
/* End this execution. */
System.out.println("Terminating.");
log.info("Terminating.");
}
}

View File

@ -10,6 +10,9 @@ import org.torproject.descriptor.ExtraInfoDescriptor;
import org.torproject.descriptor.NetworkStatusEntry;
import org.torproject.descriptor.RelayNetworkStatusConsensus;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.File;
@ -32,6 +35,8 @@ import java.util.TreeSet;
* document files for later use. */
public class Parser {
private static Logger log = LoggerFactory.getLogger(Parser.class);
/** File containing tuples of last-modified times and file names of
* descriptor files parsed in the previous execution. */
private File parseHistoryFile;
@ -106,14 +111,14 @@ public class Parser {
String[] parts = line.split(" ", 2);
excludedFiles.put(parts[1], Long.parseLong(parts[0]));
} catch (NumberFormatException e) {
System.err.printf("Illegal line '%s' in parse history. "
+ "Skipping line.%n", line);
log.warn("Illegal line '{}' in parse history. Skipping line.", line,
e);
}
}
} catch (IOException e) {
System.err.printf("Could not read history file '%s'. Not "
log.warn("Could not read history file '{}'. Not "
+ "excluding descriptors in this execution.",
this.parseHistoryFile.getAbsolutePath());
this.parseHistoryFile.getAbsolutePath(), e);
}
/* Tell the descriptor reader to exclude the files contained in the
@ -146,9 +151,8 @@ public class Parser {
+ "\n");
}
} catch (IOException e) {
System.err.printf("Could not write history file '%s'. Not "
+ "excluding descriptors in next execution.",
this.parseHistoryFile.getAbsolutePath());
log.warn("Could not write history file '{}'. Not excluding descriptors "
+ "in next execution.", this.parseHistoryFile.getAbsolutePath(), e);
}
}
@ -234,8 +238,8 @@ public class Parser {
* because relays can in theory write anything in their extra-info
* descriptors. But maybe we'll want to know. */
} else {
System.err.println("Relay " + fingerprint + " published "
+ "incomplete hidserv-stats. Ignoring.");
log.warn("Relay {} published incomplete hidserv-stats. Ignoring.",
fingerprint);
}
}
@ -257,8 +261,8 @@ public class Parser {
SortedMap<String, Integer> bandwidthWeights =
consensus.getBandwidthWeights();
if (bandwidthWeights == null) {
System.err.printf("Consensus with valid-after time %s doesn't "
+ "contain any Wxx weights. Skipping.%n",
log.warn("Consensus with valid-after time {} doesn't contain any Wxx "
+ "weights. Skipping.",
DateTimeHelper.format(consensus.getValidAfterMillis()));
return;
}
@ -269,8 +273,8 @@ public class Parser {
new TreeSet<>(Arrays.asList("Wmg,Wmm,Wme,Wmd".split(",")));
expectedWeightKeys.removeAll(bandwidthWeights.keySet());
if (!expectedWeightKeys.isEmpty()) {
System.err.printf("Consensus with valid-after time %s doesn't "
+ "contain expected Wmx weights. Skipping.%n",
log.warn("Consensus with valid-after time {} doesn't contain expected "
+ "Wmx weights. Skipping.",
DateTimeHelper.format(consensus.getValidAfterMillis()));
return;
}

View File

@ -3,11 +3,17 @@
package org.torproject.metrics.stats.hidserv;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/* Hidden-service statistics reported by a single relay covering a single
* statistics interval of usually 24 hours. These statistics are reported
* by the relay in the "hidserv-" lines of its extra-info descriptor. */
public class ReportedHidServStats implements Document {
private static Logger log
= LoggerFactory.getLogger(ReportedHidServStats.class);
/* Relay fingerprint consisting of 40 upper-case hex characters. */
private String fingerprint;
@ -109,7 +115,8 @@ public class ReportedHidServStats implements Document {
@Override
public boolean parse(String[] formattedStrings) {
if (formattedStrings.length != 2) {
System.err.printf("Invalid number of formatted strings. Skipping.%n");
log.warn("Invalid number of formatted strings: {}. Skipping.",
formattedStrings.length);
return false;
}
String[] secondParts = formattedStrings[1].split(",", 4);

View File

@ -3,6 +3,9 @@
package org.torproject.metrics.stats.hidserv;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileWriter;
@ -20,6 +23,9 @@ import java.util.TreeSet;
/* NOTE: This class is not required for running the Main class! (It
* contains its own main method.) */
public class Simulate {
private static Logger log = LoggerFactory.getLogger(Simulate.class);
private static File simCellsCsvFile =
new File("out/csv/sim-cells.csv");
@ -28,11 +34,11 @@ public class Simulate {
/** Runs two simulations to evaluate this data-processing module. */
public static void main(String[] args) throws Exception {
System.out.print("Simulating extrapolation of rendezvous cells");
log.info("Simulating extrapolation of rendezvous cells");
simulateManyCells();
System.out.print("\nSimulating extrapolation of .onions");
log.info("Simulating extrapolation of .onions");
simulateManyOnions();
System.out.println("\nTerminating.");
log.info("Terminating.");
}
private static Random rnd = new Random();
@ -45,7 +51,7 @@ public class Simulate {
final int numberOfExtrapolations = 1000;
for (int i = 0; i < numberOfExtrapolations; i++) {
bw.write(simulateCells(i));
System.out.print(".");
log.info(".");
}
bw.close();
}
@ -58,7 +64,7 @@ public class Simulate {
final int numberOfExtrapolations = 1000;
for (int i = 0; i < numberOfExtrapolations; i++) {
bw.write(simulateOnions(i));
System.out.print(".");
log.info(".");
}
bw.close();
}

View File

@ -5,6 +5,8 @@ import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.annotation.PropertyAccessor;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.PropertyNamingStrategy;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.BufferedReader;
import java.io.FileWriter;
@ -14,6 +16,9 @@ import java.util.ArrayList;
import java.util.List;
public class UpdateNews {
private static Logger log = LoggerFactory.getLogger(UpdateNews.class);
/** Update news. */
public static void main(String[] args) throws Exception {
URL textFile = new URL(
@ -72,7 +77,7 @@ public class UpdateNews {
int space = desc.indexOf(" ", open);
int close = desc.indexOf("]", open);
if (open < 0 || space < 0 || close < 0) {
System.err.println("Cannot convert link.");
log.warn("Cannot convert link. Exiting.");
System.exit(1);
}
desc = desc.substring(0, open) + "<a href=\""
@ -84,7 +89,7 @@ public class UpdateNews {
int open = desc.indexOf("`");
int close = desc.indexOf("`", open + 1);
if (open < 0 || close < 0) {
System.err.println("Cannot convert code fragment.");
log.warn("Cannot convert code fragment. Exiting.");
System.exit(1);
}
desc = desc.substring(0, open) + "<code>"