Run modules from Java only.

Implements #29166.
Karsten Loesing 2019-01-24 10:08:02 +01:00
parent 24aa37f073
commit 829863f48f
23 changed files with 296 additions and 299 deletions
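
At the heart of the change is a new single entry point, org.torproject.metrics.stats.main.Main, which replaces the per-module Ant targets removed from build.xml below. As a quick orientation, the following is a condensed sketch of the reflective dispatch pattern that the new class uses; it is not the committed code (which appears in full further down), the module list is shortened, and logging is left out.

package org.torproject.metrics.stats.main;

/** Illustrative sketch only: how the new entry point invokes each module. */
public class DispatchSketch {

  public static void main(String[] args) {
    /* Abbreviated module list; the committed class lists all ten modules. */
    Class<?>[] modules = new Class<?>[] {
        org.torproject.metrics.stats.collectdescs.Main.class,
        org.torproject.metrics.stats.bwhist.Main.class };
    for (Class<?> module : modules) {
      try {
        /* Each module keeps its existing main(String[]) entry point and is
         * invoked reflectively, so one failing module is skipped rather than
         * aborting the whole update run. */
        module.getDeclaredMethod("main", String[].class)
            .invoke(null, (Object) args);
      } catch (ReflectiveOperationException e) {
        /* The committed class logs this and moves on to the next module. */
      }
    }
  }
}

All module working directories now hang off a common base directory taken from the metrics.basedir system property, which defaults to /srv/metrics.torproject.org/metrics.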

build.xml

@@ -18,7 +18,8 @@
 <property name="name" value="metrics-web"/>
 <property name="project-main-class"
-    value="org.torproject.TBD" />
+    value="org.torproject.metrics.stats.main.Main" />
+<property name="jarincludes" value="logback.xml" />
 <property name="additional2sign" value="${warfile}" />
 <property name="tardepends" value="war" />
@@ -26,11 +27,6 @@
 <property name="specdir" value="${basedir}/generated/spec" />
-<!-- Deployment base folder.
-     Be aware that this is also set in R scripts and web.xml, currently! -->
-<property name="metrics-web.deployment.base"
-    value="/srv/metrics.torproject.org/metrics" />
 <!-- The coverage needs to be improved! -->
 <target name="coverage-check">
 <cobertura-check totallinerate="0" totalbranchrate="0" >
@@ -301,85 +297,6 @@
 <delete file="${specdir}/${specfile}.tmp2" quiet="true" />
 </target>
-<!-- This can be adapted to point at the actual work directory. -->
-<property name="prepare.deployment"
-    value="/srv/metrics.torproject.org/metrics/work" />
-<!-- Don't alter the following. -->
-<property name="modulebase" value="${prepare.deployment}/modules" />
-<!-- Operational tasks. -->
-<target name="run-web-prepare" depends="init" >
-  <mkdir dir="${prepare.deployment}/modules" />
-  <antcall target="collectdescs" />
-  <antcall target="connbidirect" />
-  <antcall target="onionperf" />
-  <antcall target="bwhist" />
-  <antcall target="advbwdist" />
-  <antcall target="hidserv" />
-  <antcall target="clients" />
-  <antcall target="servers" />
-  <antcall target="webstats" />
-  <antcall target="totalcw" />
-  <antcall target="make-data-available" />
-</target>
-<target name="collectdescs" >
-  <property name="module.name" value="collectdescs" />
-  <antcall target="run-java" />
-</target>
-<target name="connbidirect" >
-  <property name="module.name" value="connbidirect" />
-  <antcall target="run-java" />
-</target>
-<target name="onionperf" >
-  <property name="module.name" value="onionperf" />
-  <antcall target="run-java" />
-</target>
-<target name="bwhist" >
-  <property name="module.name" value="bwhist" />
-  <antcall target="run-java" />
-</target>
-<target name="advbwdist">
-  <property name="module.name" value="advbwdist" />
-  <antcall target="run-java" />
-</target>
-<target name="hidserv" >
-  <property name="module.name" value="hidserv" />
-  <antcall target="run-java" />
-</target>
-<target name="clients" >
-  <property name="module.name" value="clients" />
-  <antcall target="run-java" />
-</target>
-<target name="servers" >
-  <property name="module.name" value="servers" />
-  <antcall target="run-java" >
-    <param name="module.main"
-        value="org.torproject.metrics.stats.servers.Main" />
-  </antcall>
-</target>
-<target name="webstats" >
-  <property name="module.name" value="webstats" />
-  <antcall target="run-java" />
-</target>
-<target name="totalcw" >
-  <property name="module.name" value="totalcw" />
-  <antcall target="run-java" >
-    <param name="module.main"
-        value="org.torproject.metrics.stats.totalcw.Main" />
-  </antcall>
-</target>
 <!--
 The run-rserver target documents a working option of
 configuring an R server for running metrics-web.
@@ -401,55 +318,6 @@
 </exec>
 </target>
-<target name="make-data-available" >
-  <property name="statsdir" value="${metrics-web.deployment.base}/shared/stats/" />
-  <mkdir dir="${statsdir}" />
-  <property name="rdatadir" value="${metrics-web.deployment.base}/shared/RData" />
-  <mkdir dir="${rdatadir}" />
-  <copy todir="${statsdir}" >
-    <fileset dir="${modulebase}/onionperf/stats" includes="*.csv" />
-    <fileset dir="${modulebase}/connbidirect/stats" includes="connbidirect2.csv" />
-    <fileset dir="${modulebase}/advbwdist/stats" includes="advbwdist.csv" />
-    <fileset dir="${modulebase}/bwhist/stats" includes="*.csv" />
-    <fileset dir="${modulebase}/hidserv/stats" includes="hidserv.csv" />
-    <fileset dir="${modulebase}/clients/stats"
-        includes="clients*.csv userstats-combined.csv" />
-    <fileset dir="${modulebase}/servers/stats" includes="*.csv" />
-    <fileset dir="${modulebase}/webstats/stats" includes="webstats.csv" />
-    <fileset dir="${modulebase}/totalcw/stats" includes="totalcw.csv" />
-  </copy>
-  <copy todir="${rdatadir}" >
-    <fileset dir="${resources}/web/images/" includes="no-data-available.*" />
-  </copy>
-</target>
-<!-- Support tasks for operation -->
-<target name="run-java">
-  <echo message="Running java module ${module.name} ... " />
-  <available file="${dist}/${jarfile}" property="have.jar"/>
-  <fail unless="have.jar" message="Please run 'ant jar' first."/>
-  <condition property="mainclass"
-      value="${module.main}"
-      else="org.torproject.metrics.stats.${module.name}.Main" >
-    <isset property="module.main"/>
-  </condition>
-  <property name="workingdir" value="${modulebase}/${module.name}" />
-  <mkdir dir="${workingdir}" />
-  <java dir="${workingdir}"
-      fork="true"
-      maxmemory="4g"
-      classname="${mainclass}">
-    <classpath>
-      <pathelement location="${dist}/${jarfile}"/>
-      <pathelement location="${resources}"/>
-    </classpath>
-    <jvmarg value="-DLOGBASE=../logs"/>
-    <jvmarg value="-Duser.language=us" />
-    <jvmarg value="-Duser.region=US" />
-  </java>
-  <echo message="Java module ${module.name} finished. " />
-</target>
 <!-- The following line adds the common targets and properties
 for Metrics' Java Projects.
 -->

Image file changed (26 KiB before and after).

R graphing code (rserver)

@@ -333,7 +333,7 @@ copyright_notice <- "The Tor Project - https://metrics.torproject.org/"
 stats_dir <- "/srv/metrics.torproject.org/metrics/shared/stats/"
-rdata_dir <- "/srv/metrics.torproject.org/metrics/shared/RData/"
+no_data_available_dir <- "/srv/metrics.torproject.org/metrics/src/main/R/rserver/"
 # Helper function that copies the appropriate no data object to filename.
 copy_no_data <- function(filename) {
@@ -342,8 +342,8 @@ copy_no_data <- function(filename) {
   if (".csv" == extension) {
     write("# No data available for the given parameters.", file=filename)
   } else {
-    file.copy(paste(rdata_dir, "no-data-available", extension, sep = ""),
-      filename)
+    file.copy(paste(no_data_available_dir, "no-data-available", extension,
+      sep = ""), filename)
   }
 }

advbwdist module: Main.java

@@ -32,6 +32,9 @@ import java.util.TreeMap;
 public class Main {
+  private static final File baseDir = new File(
+      org.torproject.metrics.stats.main.Main.modulesDir, "advbwdist");
   /** Executes this data-processing module. */
   public static void main(String[] args) throws IOException {
@@ -41,7 +44,8 @@ public class Main {
     DescriptorReader descriptorReader =
         DescriptorSourceFactory.createDescriptorReader();
     for (Descriptor descriptor : descriptorReader.readDescriptors(new File(
-        "../../shared/in/recent/relay-descriptors/server-descriptors"))) {
+        org.torproject.metrics.stats.main.Main.descriptorsDir,
+        "recent/relay-descriptors/server-descriptors"))) {
       if (!(descriptor instanceof ServerDescriptor)) {
         continue;
       }
@@ -56,9 +60,9 @@ public class Main {
     /* Parse consensuses, keeping a parse history. */
     descriptorReader = DescriptorSourceFactory.createDescriptorReader();
-    File historyFile = new File("status/parsed-consensuses");
+    File historyFile = new File(baseDir, "status/parsed-consensuses");
     descriptorReader.setHistoryFile(historyFile);
-    File resultsFile = new File("stats/advbwdist-validafter.csv");
+    File resultsFile = new File(baseDir, "stats/advbwdist-validafter.csv");
     resultsFile.getParentFile().mkdirs();
     boolean writeHeader = !resultsFile.exists();
     BufferedWriter bw = new BufferedWriter(new FileWriter(resultsFile,
@@ -70,7 +74,8 @@ public class Main {
         "yyyy-MM-dd HH:mm:ss");
     dateTimeFormat.setTimeZone(TimeZone.getTimeZone("UTC"));
     for (Descriptor descriptor : descriptorReader.readDescriptors(new File(
-        "../../shared/in/recent/relay-descriptors/consensuses"))) {
+        org.torproject.metrics.stats.main.Main.descriptorsDir,
+        "recent/relay-descriptors/consensuses"))) {
       if (!(descriptor instanceof RelayNetworkStatusConsensus)) {
         continue;
       }
@@ -165,7 +170,7 @@ public class Main {
         preAggregatedValues.get(keyWithoutTime).add(value);
       }
     }
-    File aggregateResultsFile = new File("stats/advbwdist.csv");
+    File aggregateResultsFile = new File(baseDir, "stats/advbwdist.csv");
     aggregateResultsFile.getParentFile().mkdirs();
     try (BufferedWriter bw2 = new BufferedWriter(
         new FileWriter(aggregateResultsFile))) {

bwhist module: Configuration.java (deleted)

@@ -1,18 +0,0 @@
-/* Copyright 2011--2018 The Tor Project
- * See LICENSE for licensing information */
-package org.torproject.metrics.stats.bwhist;
-/** Configuration options parsed from Java properties with reasonable hard-coded
- * defaults. */
-public class Configuration {
-  static String descriptors = System.getProperty("bwhist.descriptors",
-      "../../shared/in/");
-  static String database = System.getProperty("bwhist.database",
-      "jdbc:postgresql:tordir");
-  static String history = System.getProperty("bwhist.history",
-      "status/read-descriptors");
-  static String output = System.getProperty("bwhist.output",
-      "stats/");
-}

bwhist module: Main.java

@@ -7,7 +7,6 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import java.io.File;
-import java.nio.file.Paths;
 import java.util.Arrays;
 /**
@@ -18,11 +17,19 @@ public class Main {
   private static Logger log = LoggerFactory.getLogger(Main.class);
-  private static String[][] paths = {
-      {"recent", "relay-descriptors", "consensuses"},
-      {"recent", "relay-descriptors", "extra-infos"},
-      {"archive", "relay-descriptors", "consensuses"},
-      {"archive", "relay-descriptors", "extra-infos"}};
+  private static String[] paths = {
+      "recent/relay-descriptors/consensuses",
+      "recent/relay-descriptors/extra-infos",
+      "archive/relay-descriptors/consensuses",
+      "archive/relay-descriptors/extra-infos" };
+  private static final String jdbcString = String.format(
+      "jdbc:postgresql://localhost/tordir?user=%s&password=%s",
+      System.getProperty("metrics.dbuser", "metrics"),
+      System.getProperty("metrics.dbpass", "password"));
+  private static final File baseDir = new File(
+      org.torproject.metrics.stats.main.Main.modulesDir, "bwhist");
   /** Executes this data-processing module. */
   public static void main(String[] args) throws Exception {
@@ -31,20 +38,20 @@ public class Main {
     log.info("Reading descriptors and inserting relevant parts into the "
         + "database.");
-    File[] descriptorDirectories = Arrays.stream(paths).map((String[] path)
-        -> Paths.get(Configuration.descriptors, path).toFile())
-        .toArray(File[]::new);
-    File historyFile = new File(Configuration.history);
+    File[] descriptorDirectories = Arrays.stream(paths).map((String path)
+        -> new File(org.torproject.metrics.stats.main.Main.descriptorsDir,
+        path)).toArray(File[]::new);
+    File historyFile = new File(baseDir, "status/read-descriptors");
     RelayDescriptorDatabaseImporter database
         = new RelayDescriptorDatabaseImporter(descriptorDirectories,
-        historyFile, Configuration.database);
+        historyFile, jdbcString);
     database.importRelayDescriptors();
     log.info("Aggregating database entries.");
     database.aggregate();
     log.info("Querying aggregated statistics from the database.");
-    new Writer().write(Paths.get(Configuration.output, "bandwidth.csv"),
+    new Writer().write(new File(baseDir, "stats/bandwidth.csv").toPath(),
         database.queryBandwidth());
     log.info("Closing database connection.");

clients module: Database.java

@@ -22,7 +22,10 @@ import java.util.TimeZone;
 class Database implements AutoCloseable {
   /** Database connection string. */
-  private String jdbcString;
+  private static final String jdbcString = String.format(
+      "jdbc:postgresql://localhost/userstats?user=%s&password=%s",
+      System.getProperty("metrics.dbuser", "metrics"),
+      System.getProperty("metrics.dbpass", "password"));
   /** Connection object for all interactions with the database. */
   private Connection connection;
@@ -33,14 +36,13 @@ class Database implements AutoCloseable {
   /** Create a new Database instance and prepare for inserting or querying
    * data. */
-  Database(String jdbcString) throws SQLException {
-    this.jdbcString = jdbcString;
+  Database() throws SQLException {
     this.connect();
     this.prepareStatements();
   }
   private void connect() throws SQLException {
-    this.connection = DriverManager.getConnection(this.jdbcString);
+    this.connection = DriverManager.getConnection(jdbcString);
     this.connection.setAutoCommit(false);
   }
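
The same credential handling recurs in every Database class touched by this commit: the JDBC URL is now assembled inside the class from the metrics.dbuser and metrics.dbpass system properties, with the defaults visible in the diff, instead of being passed in by the caller. A minimal sketch, assuming a local PostgreSQL database named userstats as in the clients module above; the class name is illustrative, not part of the commit.

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.SQLException;

public class ConnectionSketch {

  public static void main(String[] args) throws SQLException {
    /* Same URL pattern as the committed Database classes; only the database
     * name differs between modules. */
    String jdbcString = String.format(
        "jdbc:postgresql://localhost/userstats?user=%s&password=%s",
        System.getProperty("metrics.dbuser", "metrics"),
        System.getProperty("metrics.dbpass", "password"));
    /* In operation the defaults would be overridden with JVM options such as
     * -Dmetrics.dbuser=... and -Dmetrics.dbpass=... */
    try (Connection connection = DriverManager.getConnection(jdbcString)) {
      connection.setAutoCommit(false);
    }
  }
}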

clients module: Detector.java

@@ -53,7 +53,6 @@ import java.io.FileWriter;
 import java.io.IOException;
 import java.io.LineNumberReader;
 import java.nio.file.Path;
-import java.nio.file.Paths;
 import java.time.LocalDate;
 import java.time.format.DateTimeParseException;
 import java.util.ArrayList;
@@ -70,10 +69,12 @@ import java.util.stream.Collectors;
 public class Detector {
   /** Input file. */
-  private static final Path INPUT_PATH = Paths.get("stats", "userstats.csv");
+  private static final Path INPUT_PATH = new File(Main.baseDir,
+      "stats/userstats.csv").toPath();
   /** Output file. */
-  private static final Path OUTPUT_PATH = Paths.get("stats", "clients.csv");
+  private static final Path OUTPUT_PATH = new File(Main.baseDir,
+      "stats/clients.csv").toPath();
   /** Number of largest locations to be included in the detection algorithm. */
   private static final int NUM_LARGEST_LOCATIONS = 50;

clients module: Main.java

@@ -16,7 +16,6 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import java.io.File;
-import java.nio.file.Paths;
 import java.sql.SQLException;
 import java.util.Map;
 import java.util.SortedMap;
@@ -26,18 +25,18 @@ public class Main {
   private static Logger log = LoggerFactory.getLogger(Main.class);
-  private static final String jdbcString
-      = System.getProperty("clients.database", "jdbc:postgresql:userstats");
   private static Database database;
+  static final File baseDir = new File(
+      org.torproject.metrics.stats.main.Main.modulesDir, "clients");
   /** Executes this data-processing module. */
   public static void main(String[] args) throws Exception {
     log.info("Starting clients module.");
     log.info("Connecting to database.");
-    database = new Database(jdbcString);
+    database = new Database();
     log.info("Reading relay descriptors and importing relevant parts into the "
         + "database.");
@@ -52,10 +51,10 @@ public class Main {
     database.commit();
     log.info("Querying aggregated statistics from the database.");
-    new Writer().write(Paths.get("stats", "userstats.csv"),
+    new Writer().write(new File(baseDir, "stats/userstats.csv").toPath(),
         database.queryEstimated());
-    new Writer().write(Paths.get("stats", "userstats-combined.csv"),
-        database.queryCombined());
+    new Writer().write(new File(baseDir, "stats/userstats-combined.csv")
+        .toPath(), database.queryCombined());
     log.info("Disconnecting from database.");
     database.close();
@@ -75,13 +74,17 @@ public class Main {
   private static void parseRelayDescriptors() throws Exception {
     DescriptorReader descriptorReader =
         DescriptorSourceFactory.createDescriptorReader();
-    File historyFile = new File("status/relay-descriptors");
+    File historyFile = new File(baseDir, "status/relay-descriptors");
     descriptorReader.setHistoryFile(historyFile);
     for (Descriptor descriptor : descriptorReader.readDescriptors(
-        new File("../../shared/in/recent/relay-descriptors/consensuses"),
-        new File("../../shared/in/recent/relay-descriptors/extra-infos"),
-        new File("../../shared/in/archive/relay-descriptors/consensuses"),
-        new File("../../shared/in/archive/relay-descriptors/extra-infos"))) {
+        new File(org.torproject.metrics.stats.main.Main.descriptorsDir,
+            "recent/relay-descriptors/consensuses"),
+        new File(org.torproject.metrics.stats.main.Main.descriptorsDir,
+            "recent/relay-descriptors/extra-infos"),
+        new File(org.torproject.metrics.stats.main.Main.descriptorsDir,
+            "archive/relay-descriptors/consensuses"),
+        new File(org.torproject.metrics.stats.main.Main.descriptorsDir,
+            "archive/relay-descriptors/extra-infos"))) {
       if (descriptor instanceof ExtraInfoDescriptor) {
        parseRelayExtraInfoDescriptor((ExtraInfoDescriptor) descriptor);
      } else if (descriptor instanceof RelayNetworkStatusConsensus) {
@@ -209,11 +212,13 @@ public class Main {
   private static void parseBridgeDescriptors() throws Exception {
     DescriptorReader descriptorReader =
         DescriptorSourceFactory.createDescriptorReader();
-    File historyFile = new File("status/bridge-descriptors");
+    File historyFile = new File(baseDir, "status/bridge-descriptors");
     descriptorReader.setHistoryFile(historyFile);
     for (Descriptor descriptor : descriptorReader.readDescriptors(
-        new File("../../shared/in/recent/bridge-descriptors"),
-        new File("../../shared/in/archive/bridge-descriptors"))) {
+        new File(org.torproject.metrics.stats.main.Main.descriptorsDir,
+            "recent/bridge-descriptors"),
+        new File(org.torproject.metrics.stats.main.Main.descriptorsDir,
+            "archive/bridge-descriptors"))) {
       if (descriptor instanceof ExtraInfoDescriptor) {
         parseBridgeExtraInfoDescriptor(
             (ExtraInfoDescriptor) descriptor);

collectdescs module: Main.java

@@ -10,6 +10,9 @@ import java.io.File;
 public class Main {
+  private static final File baseDir = new File(
+      org.torproject.metrics.stats.main.Main.modulesDir, "collectdescs");
   /** Executes this data-processing module. */
   public static void main(String[] args) {
     /* Fetch recent descriptors from CollecTor. */
@@ -27,7 +30,7 @@ public class Main {
         "/recent/relay-descriptors/votes/",
         "/recent/torperf/",
         "/recent/webstats/"
-        }, 0L, new File("../../shared/in"), true);
+        }, 0L, org.torproject.metrics.stats.main.Main.descriptorsDir, true);
   }
 }

connbidirect module: Main.java

@@ -129,13 +129,18 @@ public class Main {
   static final long ONE_DAY_IN_MILLIS = 86400000L;
+  private static final File baseDir = new File(
+      org.torproject.metrics.stats.main.Main.modulesDir, "connbidirect");
   /** Executes this data-processing module. */
   public static void main(String[] args) throws IOException {
-    File parseHistoryFile = new File("stats/parse-history");
-    File aggregateStatsFile = new File("stats/connbidirect2.csv");
+    File parseHistoryFile = new File(baseDir, "stats/parse-history");
+    File aggregateStatsFile = new File(baseDir, "stats/connbidirect2.csv");
     File[] descriptorsDirectories = new File[] {
-        new File("../../shared/in/archive/relay-descriptors/extra-infos"),
-        new File("../../shared/in/recent/relay-descriptors/extra-infos")};
+        new File(org.torproject.metrics.stats.main.Main.descriptorsDir,
+            "archive/relay-descriptors/extra-infos"),
+        new File(org.torproject.metrics.stats.main.Main.descriptorsDir,
+            "recent/relay-descriptors/extra-infos")};
     SortedMap<String, Long> parseHistory = parseParseHistory(
         readStringFromFile(parseHistoryFile));
     if (parseHistory == null) {
@@ -160,7 +165,7 @@ public class Main {
          + "leave out those descriptors in future runs.");
       return;
     }
-    File rawStatsFile = new File("stats/raw-stats");
+    File rawStatsFile = new File(baseDir, "stats/raw-stats");
     SortedSet<RawStat> rawStats = parseRawStats(
         readStringFromFile(rawStatsFile));
     if (rawStats == null) {

hidserv module: Main.java

@@ -16,6 +16,9 @@ public class Main {
   private static Logger log = LoggerFactory.getLogger(Main.class);
+  private static final File baseDir = new File(
+      org.torproject.metrics.stats.main.Main.modulesDir, "hidserv");
   /** Parses new descriptors, extrapolate contained statistics using
    * computed network fractions, aggregate results, and writes results to
    * disk. */
@@ -23,9 +26,11 @@ public class Main {
     /* Initialize directories and file paths. */
     File[] inDirectories = new File[] {
-        new File("../../shared/in/recent/relay-descriptors/consensuses"),
-        new File("../../shared/in/recent/relay-descriptors/extra-infos") };
-    File statusDirectory = new File("status");
+        new File(org.torproject.metrics.stats.main.Main.descriptorsDir,
+            "recent/relay-descriptors/consensuses"),
+        new File(org.torproject.metrics.stats.main.Main.descriptorsDir,
+            "recent/relay-descriptors/extra-infos") };
+    File statusDirectory = new File(baseDir, "status");
     /* Initialize parser and read parse history to avoid parsing
      * descriptor files that haven't changed since the last execution. */
@@ -71,7 +76,8 @@ public class Main {
      * other statistics. Write the result to a .csv file that can be
      * processed by other tools. */
     log.info("Aggregating statistics...");
-    File hidservStatsExtrapolatedCsvFile = new File("stats/hidserv.csv");
+    File hidservStatsExtrapolatedCsvFile = new File(baseDir,
+        "stats/hidserv.csv");
     Aggregator aggregator = new Aggregator(statusDirectory,
         extrapolatedHidServStatsStore, hidservStatsExtrapolatedCsvFile);
     aggregator.aggregateHidServStats();

main module: Main.java (new)

@@ -0,0 +1,122 @@
+package org.torproject.metrics.stats.main;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.File;
+import java.io.IOException;
+import java.lang.reflect.InvocationTargetException;
+import java.nio.file.Files;
+import java.nio.file.StandardCopyOption;
+import java.util.ArrayList;
+import java.util.List;
+
+public class Main {
+
+  private static final Logger log = LoggerFactory.getLogger(Main.class);
+
+  private static final String baseDir = System.getProperty("metrics.basedir",
+      "/srv/metrics.torproject.org/metrics");
+
+  public static final File modulesDir = new File(baseDir, "work/modules");
+
+  public static final File descriptorsDir = new File(baseDir, "work/shared/in");
+
+  private static final File statsDir = new File(baseDir, "shared/stats");
+
+  /** Start the metrics update run. */
+  public static void main(String[] args) {
+    log.info("Starting metrics update run.");
+    File[] outputDirs = new File[] { modulesDir, statsDir };
+    for (File outputDir : outputDirs) {
+      if (outputDir.exists()) {
+        continue;
+      }
+      if (outputDir.mkdirs()) {
+        log.info("Successfully created module base directory {} and any "
+            + "nonexistent parent directories.",
+            outputDir.getAbsolutePath());
+      } else {
+        log.error("Unable to create module base directory {} and any "
+            + "nonexistent parent directories. Exiting.",
+            outputDir.getAbsolutePath());
+        return;
+      }
+    }
+    Class<?>[] modules = new Class<?>[] {
+        org.torproject.metrics.stats.collectdescs.Main.class,
+        org.torproject.metrics.stats.connbidirect.Main.class,
+        org.torproject.metrics.stats.onionperf.Main.class,
+        org.torproject.metrics.stats.bwhist.Main.class,
+        org.torproject.metrics.stats.advbwdist.Main.class,
+        org.torproject.metrics.stats.hidserv.Main.class,
+        org.torproject.metrics.stats.clients.Main.class,
+        org.torproject.metrics.stats.servers.Main.class,
+        org.torproject.metrics.stats.webstats.Main.class,
+        org.torproject.metrics.stats.totalcw.Main.class
+    };
+    for (Class<?> module : modules) {
+      try {
+        log.info("Starting {} module.", module.getName());
+        module.getDeclaredMethod("main", String[].class)
+            .invoke(null, (Object) args);
+        log.info("Completed {} module.", module.getName());
+      } catch (NoSuchMethodException | IllegalAccessException
+          | InvocationTargetException e) {
+        log.warn("Caught an exception when invoking the main method of the {} "
+            + "module. Moving on to the next module, if available.", e);
+      }
+    }
+    log.info("Making module data available.");
+    File[] moduleStatsDirs = new File[] {
+        new File(modulesDir, "connbidirect/stats"),
+        new File(modulesDir, "onionperf/stats"),
+        new File(modulesDir, "bwhist/stats"),
+        new File(modulesDir, "advbwdist/stats/advbwdist.csv"),
+        new File(modulesDir, "hidserv/stats"),
+        new File(modulesDir, "clients/stats/clients.csv"),
+        new File(modulesDir, "clients/stats/userstats-combined.csv"),
+        new File(modulesDir, "servers/stats"),
+        new File(modulesDir, "webstats/stats"),
+        new File(modulesDir, "totalcw/stats")
+    };
+    List<String> copiedFiles = new ArrayList<>();
+    for (File moduleStatsDir : moduleStatsDirs) {
+      if (!moduleStatsDir.exists()) {
+        log.warn("Skipping nonexistent module stats dir {}.", moduleStatsDir);
+        continue;
+      }
+      File[] moduleStatsFiles = moduleStatsDir.isDirectory()
+          ? moduleStatsDir.listFiles() : new File[] { moduleStatsDir };
+      if (null == moduleStatsFiles) {
+        log.warn("Skipping nonexistent module stats dir {}.", moduleStatsDir);
+        continue;
+      }
+      for (File statsFile : moduleStatsFiles) {
+        if (!statsFile.isFile() || !statsFile.getName().endsWith(".csv")) {
+          continue;
+        }
+        try {
+          Files.copy(statsFile.toPath(),
+              new File(statsDir, statsFile.getName()).toPath(),
+              StandardCopyOption.REPLACE_EXISTING);
+          copiedFiles.add(statsFile.getName());
+        } catch (IOException e) {
+          log.warn("Unable to copy module stats file {} to stats output "
+              + "directory {}. Skipping.", statsFile, statsDir, e);
+        }
+      }
+    }
+    if (!copiedFiles.isEmpty()) {
+      log.info("Successfully copied {} files to stats output directory: {}",
+          copiedFiles.size(), copiedFiles);
+    }
+    log.info("Completed metrics update run.");
+  }
+}
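
To summarize the layout convention that the module diffs follow: each module anchors its state under modulesDir/<module>, reads descriptors from the shared descriptorsDir that the collectdescs module populates, and writes CSV results into its own stats/ subdirectory, from which the class above copies them to shared/stats. A small illustrative sketch using the advbwdist names from the diff above; the class name and the println are not part of the commit.

package org.torproject.metrics.stats.advbwdist;

import java.io.File;

public class LayoutSketch {

  /* Per-module state lives under <metrics.basedir>/work/modules/<module>. */
  private static final File baseDir = new File(
      org.torproject.metrics.stats.main.Main.modulesDir, "advbwdist");

  public static void main(String[] args) {
    /* Inputs come from the shared descriptors directory ... */
    File consensuses = new File(
        org.torproject.metrics.stats.main.Main.descriptorsDir,
        "recent/relay-descriptors/consensuses");
    /* ... and results go to the module's stats/ subdirectory, from which
     * main.Main copies every *.csv file to shared/stats. */
    File results = new File(baseDir, "stats/advbwdist.csv");
    System.out.println(consensuses + " -> " + results);
  }
}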

onionperf module: Main.java

@@ -13,7 +13,6 @@ import java.io.IOException;
 import java.nio.charset.StandardCharsets;
 import java.nio.file.Files;
 import java.nio.file.Path;
-import java.nio.file.Paths;
 import java.sql.Connection;
 import java.sql.DriverManager;
 import java.sql.PreparedStatement;
@@ -37,23 +36,30 @@ public class Main {
   /** Logger for this class. */
   private static Logger log = LoggerFactory.getLogger(Main.class);
+  private static final String jdbcString = String.format(
+      "jdbc:postgresql://localhost/onionperf?user=%s&password=%s",
+      System.getProperty("metrics.dbuser", "metrics"),
+      System.getProperty("metrics.dbpass", "password"));
+  private static final File baseDir = new File(
+      org.torproject.metrics.stats.main.Main.modulesDir, "onionperf");
   /** Executes this data-processing module. */
   public static void main(String[] args) throws Exception {
     log.info("Starting onionperf module.");
-    String dbUrlString = "jdbc:postgresql:onionperf";
-    Connection connection = connectToDatabase(dbUrlString);
+    Connection connection = connectToDatabase();
     importOnionPerfFiles(connection);
-    writeStatistics(Paths.get("stats", "torperf-1.1.csv"),
+    writeStatistics(new File(baseDir, "stats/torperf-1.1.csv").toPath(),
         queryOnionPerf(connection));
-    writeStatistics(Paths.get("stats", "buildtimes.csv"),
+    writeStatistics(new File(baseDir, "stats/buildtimes.csv").toPath(),
         queryBuildTimes(connection));
-    writeStatistics(Paths.get("stats", "latencies.csv"),
+    writeStatistics(new File(baseDir, "stats/latencies.csv").toPath(),
         queryLatencies(connection));
     disconnectFromDatabase(connection);
     log.info("Terminated onionperf module.");
   }
-  private static Connection connectToDatabase(String jdbcString)
+  private static Connection connectToDatabase()
       throws SQLException {
     log.info("Connecting to database.");
     Connection connection = DriverManager.getConnection(jdbcString);
@@ -91,8 +97,10 @@ public class Main {
     Calendar calendar = Calendar.getInstance(TimeZone.getTimeZone("UTC"));
     DescriptorReader dr = DescriptorSourceFactory.createDescriptorReader();
     for (Descriptor d : dr.readDescriptors(
-        new File("../../shared/in/archive/torperf"),
-        new File("../../shared/in/recent/torperf"))) {
+        new File(org.torproject.metrics.stats.main.Main.descriptorsDir,
+            "archive/torperf"),
+        new File(org.torproject.metrics.stats.main.Main.descriptorsDir,
+            "recent/torperf"))) {
       if (!(d instanceof TorperfResult)) {
         continue;
       }

servers module: Configuration.java (deleted)

@@ -1,18 +0,0 @@
-/* Copyright 2017--2018 The Tor Project
- * See LICENSE for licensing information */
-package org.torproject.metrics.stats.servers;
-/** Configuration options parsed from Java properties with reasonable hard-coded
- * defaults. */
-class Configuration {
-  static String descriptors = System.getProperty("servers.descriptors",
-      "../../shared/in/");
-  static String database = System.getProperty("servers.database",
-      "jdbc:postgresql:ipv6servers");
-  static String history = System.getProperty("servers.history",
-      "status/read-descriptors");
-  static String output = System.getProperty("servers.output",
-      "stats/");
-}

servers module: Database.java

@@ -29,7 +29,10 @@ import java.util.TimeZone;
 class Database implements AutoCloseable {
   /** Database connection string. */
-  private String jdbcString;
+  private static final String jdbcString = String.format(
+      "jdbc:postgresql://localhost/ipv6servers?user=%s&password=%s",
+      System.getProperty("metrics.dbuser", "metrics"),
+      System.getProperty("metrics.dbpass", "password"));
   /** Connection object for all interactions with the database. */
   private Connection connection;
@@ -87,15 +90,14 @@ class Database implements AutoCloseable {
   /** Create a new Database instance and prepare for inserting or querying
    * data. */
-  Database(String jdbcString) throws SQLException {
-    this.jdbcString = jdbcString;
+  Database() throws SQLException {
     this.connect();
     this.prepareStatements();
     this.initializeCaches();
   }
   private void connect() throws SQLException {
-    this.connection = DriverManager.getConnection(this.jdbcString);
+    this.connection = DriverManager.getConnection(jdbcString);
     this.connection.setAutoCommit(false);
   }

servers module: Main.java

@@ -14,7 +14,6 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import java.io.File;
-import java.nio.file.Paths;
 import java.sql.SQLException;
 import java.util.Arrays;
@@ -25,15 +24,18 @@ public class Main {
   private static Logger log = LoggerFactory.getLogger(Main.class);
-  private static String[][] paths = {
-      {"recent", "relay-descriptors", "consensuses"},
-      {"recent", "relay-descriptors", "server-descriptors"},
-      {"recent", "bridge-descriptors", "statuses"},
-      {"recent", "bridge-descriptors", "server-descriptors"},
-      {"archive", "relay-descriptors", "consensuses"},
-      {"archive", "relay-descriptors", "server-descriptors"},
-      {"archive", "bridge-descriptors", "statuses"},
-      {"archive", "bridge-descriptors", "server-descriptors"}};
+  private static final File baseDir = new File(
+      org.torproject.metrics.stats.main.Main.modulesDir, "servers");
+
+  private static String[] paths = {
+      "recent/relay-descriptors/consensuses",
+      "recent/relay-descriptors/server-descriptors",
+      "recent/bridge-descriptors/statuses",
+      "recent/bridge-descriptors/server-descriptors",
+      "archive/relay-descriptors/consensuses",
+      "archive/relay-descriptors/server-descriptors",
+      "archive/bridge-descriptors/statuses",
+      "archive/bridge-descriptors/server-descriptors" };
   /** Run the module. */
   public static void main(String[] args) throws Exception {
@@ -43,15 +45,15 @@ public class Main {
     log.info("Reading descriptors and inserting relevant parts into the "
         + "database.");
     DescriptorReader reader = DescriptorSourceFactory.createDescriptorReader();
-    File historyFile = new File(Configuration.history);
+    File historyFile = new File(baseDir, "status/read-descriptors");
     reader.setHistoryFile(historyFile);
     Parser parser = new Parser();
-    try (Database database = new Database(Configuration.database)) {
+    try (Database database = new Database()) {
       try {
         for (Descriptor descriptor : reader.readDescriptors(
-            Arrays.stream(paths).map((String[] path)
-            -> Paths.get(Configuration.descriptors, path).toFile())
+            Arrays.stream(paths).map((String path) -> new File(
+            org.torproject.metrics.stats.main.Main.descriptorsDir, path))
             .toArray(File[]::new))) {
           if (descriptor instanceof ServerDescriptor) {
             database.insertServerDescriptor(parser.parseServerDescriptor(
                 (ServerDescriptor) descriptor));
@@ -86,17 +88,18 @@ public class Main {
       reader.saveHistoryFile(historyFile);
       log.info("Querying aggregated statistics from the database.");
-      new Writer().write(Paths.get(Configuration.output, "ipv6servers.csv"),
+      File outputDir = new File(baseDir, "stats");
+      new Writer().write(new File(outputDir, "ipv6servers.csv").toPath(),
          database.queryServersIpv6());
-      new Writer().write(Paths.get(Configuration.output, "advbw.csv"),
+      new Writer().write(new File(outputDir, "advbw.csv").toPath(),
          database.queryAdvbw());
-      new Writer().write(Paths.get(Configuration.output, "networksize.csv"),
+      new Writer().write(new File(outputDir, "networksize.csv").toPath(),
          database.queryNetworksize());
-      new Writer().write(Paths.get(Configuration.output, "relayflags.csv"),
+      new Writer().write(new File(outputDir, "relayflags.csv").toPath(),
          database.queryRelayflags());
-      new Writer().write(Paths.get(Configuration.output, "versions.csv"),
+      new Writer().write(new File(outputDir, "versions.csv").toPath(),
          database.queryVersions());
-      new Writer().write(Paths.get(Configuration.output, "platforms.csv"),
+      new Writer().write(new File(outputDir, "platforms.csv").toPath(),
          database.queryPlatforms());
      log.info("Terminating servers module.");

totalcw module: Configuration.java (deleted)

@@ -1,18 +0,0 @@
-/* Copyright 2018 The Tor Project
- * See LICENSE for licensing information */
-package org.torproject.metrics.stats.totalcw;
-/** Configuration options parsed from Java properties with reasonable hard-coded
- * defaults. */
-class Configuration {
-  static String descriptors = System.getProperty("totalcw.descriptors",
-      "../../shared/in/");
-  static String database = System.getProperty("totalcw.database",
-      "jdbc:postgresql:totalcw");
-  static String history = System.getProperty("totalcw.history",
-      "status/read-descriptors");
-  static String output = System.getProperty("totalcw.output",
-      "stats/totalcw.csv");
-}

totalcw module: Database.java

@@ -25,7 +25,10 @@ import java.util.TimeZone;
 class Database implements AutoCloseable {
   /** Database connection string. */
-  private String jdbcString;
+  private static final String jdbcString = String.format(
+      "jdbc:postgresql://localhost/totalcw?user=%s&password=%s",
+      System.getProperty("metrics.dbuser", "metrics"),
+      System.getProperty("metrics.dbpass", "password"));
   /** Connection object for all interactions with the database. */
   private Connection connection;
@@ -46,14 +49,13 @@ class Database implements AutoCloseable {
   /** Create a new Database instance and prepare for inserting or querying
    * data. */
-  Database(String jdbcString) throws SQLException {
-    this.jdbcString = jdbcString;
+  Database() throws SQLException {
     this.connect();
     this.prepareStatements();
   }
   private void connect() throws SQLException {
-    this.connection = DriverManager.getConnection(this.jdbcString);
+    this.connection = DriverManager.getConnection(jdbcString);
     this.connection.setAutoCommit(false);
   }

totalcw module: Main.java

@@ -13,7 +13,6 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import java.io.File;
-import java.nio.file.Paths;
 import java.sql.SQLException;
 import java.util.Arrays;
@@ -24,11 +23,14 @@ public class Main {
   private static Logger log = LoggerFactory.getLogger(Main.class);
-  private static String[][] paths = {
-      {"recent", "relay-descriptors", "consensuses"},
-      {"archive", "relay-descriptors", "consensuses"},
-      {"recent", "relay-descriptors", "votes"},
-      {"archive", "relay-descriptors", "votes"}};
+  private static final File baseDir = new File(
+      org.torproject.metrics.stats.main.Main.modulesDir, "totalcw");
+
+  private static String[] paths = {
+      "recent/relay-descriptors/consensuses",
+      "archive/relay-descriptors/consensuses",
+      "recent/relay-descriptors/votes",
+      "archive/relay-descriptors/votes" };
   /** Run the module. */
   public static void main(String[] args) throws Exception {
@@ -38,15 +40,15 @@ public class Main {
     log.info("Reading consensuses and votes and inserting relevant parts into "
         + "the database.");
     DescriptorReader reader = DescriptorSourceFactory.createDescriptorReader();
-    File historyFile = new File(Configuration.history);
+    File historyFile = new File(baseDir, "status/read-descriptors");
     reader.setHistoryFile(historyFile);
     Parser parser = new Parser();
-    try (Database database = new Database(Configuration.database)) {
+    try (Database database = new Database()) {
       try {
         for (Descriptor descriptor : reader.readDescriptors(
-            Arrays.stream(paths).map((String[] path)
-            -> Paths.get(Configuration.descriptors, path).toFile())
+            Arrays.stream(paths).map((String path) -> new File(
+            org.torproject.metrics.stats.main.Main.descriptorsDir, path))
            .toArray(File[]::new))) {
          if (descriptor instanceof RelayNetworkStatusConsensus) {
            database.insertConsensus(parser.parseRelayNetworkStatusConsensus(
                (RelayNetworkStatusConsensus) descriptor));
@@ -71,9 +73,10 @@ public class Main {
      log.info("Querying aggregated statistics from the database.");
      Iterable<OutputLine> output = database.queryTotalcw();
-      log.info("Writing aggregated statistics to {}.", Configuration.output);
+      File outputFile = new File(baseDir, "stats/totalcw.csv");
+      log.info("Writing aggregated statistics to {}.", outputFile);
      if (null != output) {
-        new Writer().write(Paths.get(Configuration.output), output);
+        new Writer().write(outputFile.toPath(), output);
      }
      log.info("Terminating totalcw module.");

webstats module: Main.java

@@ -19,7 +19,6 @@ import java.io.IOException;
 import java.nio.charset.StandardCharsets;
 import java.nio.file.Files;
 import java.nio.file.Path;
-import java.nio.file.Paths;
 import java.sql.Connection;
 import java.sql.Date;
 import java.sql.DriverManager;
@@ -48,6 +47,11 @@ public class Main {
   /** Logger for this class. */
   private static Logger log = LoggerFactory.getLogger(Main.class);
+  private static final String jdbcString = String.format(
+      "jdbc:postgresql://localhost/webstats?user=%s&password=%s",
+      System.getProperty("metrics.dbuser", "metrics"),
+      System.getProperty("metrics.dbpass", "password"));
   private static final String LOG_DATE = "log_date";
   private static final String REQUEST_TYPE = "request_type";
@@ -66,22 +70,27 @@ public class Main {
       + PLATFORM + "," + CHANNEL + "," + LOCALE + "," + INCREMENTAL + ","
       + COUNT;
+  private static final File baseDir = new File(
+      org.torproject.metrics.stats.main.Main.modulesDir, "webstats");
   /** Executes this data-processing module. */
   public static void main(String[] args) throws Exception {
     log.info("Starting webstats module.");
-    String dbUrlString = "jdbc:postgresql:webstats";
-    Connection connection = connectToDatabase(dbUrlString);
+    Connection connection = connectToDatabase();
     SortedSet<String> skipFiles = queryImportedFileNames(connection);
     importLogFiles(connection, skipFiles,
-        new File("../../shared/in/recent/webstats"),
-        new File("../../shared/in/archive/webstats"));
+        new File(org.torproject.metrics.stats.main.Main.descriptorsDir,
+            "recent/webstats"),
+        new File(org.torproject.metrics.stats.main.Main.descriptorsDir,
+            "archive/webstats"));
     SortedSet<String> statistics = queryWebstats(connection);
-    writeStatistics(Paths.get("stats", "webstats.csv"), statistics);
+    writeStatistics(new File(baseDir, "stats/webstats.csv").toPath(),
+        statistics);
     disconnectFromDatabase(connection);
     log.info("Terminated webstats module.");
   }
-  private static Connection connectToDatabase(String jdbcString)
+  private static Connection connectToDatabase()
       throws SQLException {
     log.info("Connecting to database.");
     Connection connection = DriverManager.getConnection(jdbcString);