Add parsing support for OnionPerf analysis files.

Implements #34070.
Karsten Loesing 2020-04-30 15:56:52 +02:00
parent 3f298b1b57
commit 7eb783968b
10 changed files with 887 additions and 4 deletions

@@ -1,5 +1,9 @@
# Changes in version 2.??.? - 2020-??-??
* Medium changes
- Add parsing support for OnionPerf analysis files by converting
and returning contained transfers as Torperf results.
# Changes in version 2.11.0 - 2020-04-13

@@ -170,9 +170,9 @@ public interface TorperfResult extends Descriptor {
List<String> getPath();
/**
* Return a list of times in milliseconds since the epoch when circuit
* hops were built, or null if the torperf line didn't contain that
* information.
* Return a list of times in milliseconds between launching the circuit and
* extending to the next circuit hop, or null if the torperf line didn't
* contain that information.
*
* @since 1.0.0
*/

@@ -10,6 +10,7 @@ import org.torproject.descriptor.Descriptor;
import org.torproject.descriptor.DescriptorParseException;
import org.torproject.descriptor.DescriptorParser;
import org.torproject.descriptor.log.LogDescriptorImpl;
import org.torproject.descriptor.onionperf.OnionPerfAnalysisConverter;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -130,6 +131,9 @@ public class DescriptorParserImpl implements DescriptorParser {
} else if (firstLines.startsWith("@type torperf 1.")) {
return TorperfResultImpl.parseTorperfResults(rawDescriptorBytes,
sourceFile);
} else if (fileName.endsWith(".onionperf.analysis.json.xz")) {
return new OnionPerfAnalysisConverter(rawDescriptorBytes, sourceFile)
.asTorperfResults();
} else if (firstLines.startsWith("@type snowflake-stats 1.")
|| firstLines.startsWith(Key.SNOWFLAKE_STATS_END.keyword + SP)
|| firstLines.contains(NL + Key.SNOWFLAKE_STATS_END.keyword + SP)) {
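The new branch dispatches on the file name suffix alone; the bytes handed to OnionPerfAnalysisConverter are expected to be uncompressed, because DescriptorReaderImpl (shown in the next file) decompresses .xz contents before invoking the parser. A minimal sketch of calling the parser directly, assuming the parseDescriptors overload that takes raw bytes, a source file reference, and a file name; the file path is illustrative only:

import org.apache.commons.compress.compressors.xz.XZCompressorInputStream;
import org.apache.commons.compress.utils.IOUtils;
import org.torproject.descriptor.Descriptor;
import org.torproject.descriptor.DescriptorParser;
import org.torproject.descriptor.DescriptorSourceFactory;
import org.torproject.descriptor.TorperfResult;
import java.io.File;
import java.io.FileInputStream;
import java.io.InputStream;

public class ParseOnionPerfAnalysisSketch {
  public static void main(String[] args) throws Exception {
    File analysisFile = new File("2020-04-27.onionperf.analysis.json.xz");
    byte[] uncompressedBytes;
    try (InputStream is = new XZCompressorInputStream(
        new FileInputStream(analysisFile))) {
      // The converter expects uncompressed analysis bytes.
      uncompressedBytes = IOUtils.toByteArray(is);
    }
    DescriptorParser parser = DescriptorSourceFactory.createDescriptorParser();
    // The file name must keep the .onionperf.analysis.json.xz suffix to trigger the converter.
    for (Descriptor descriptor : parser.parseDescriptors(
        uncompressedBytes, analysisFile, analysisFile.getName())) {
      if (descriptor instanceof TorperfResult) {
        System.out.println(((TorperfResult) descriptor).getSource());
      }
    }
  }
}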

@@ -330,6 +330,8 @@ public class DescriptorReaderImpl implements DescriptorReader {
InputStream is = fis;
if (file.getName().endsWith(".gz")) {
is = new GzipCompressorInputStream(fis);
} else if (file.getName().endsWith(".xz")) {
is = new XZCompressorInputStream(fis);
}
byte[] rawDescriptorBytes = IOUtils.toByteArray(is);
if (rawDescriptorBytes.length > 0) {
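With the added XZ branch, DescriptorReader can pick up OnionPerf analysis files straight from disk without manual decompression. A minimal usage sketch, assuming the readDescriptors(File...) method of the metrics-lib 2.x DescriptorReader API; path and printed fields are illustrative:

import org.torproject.descriptor.Descriptor;
import org.torproject.descriptor.DescriptorReader;
import org.torproject.descriptor.DescriptorSourceFactory;
import org.torproject.descriptor.TorperfResult;
import java.io.File;

public class ReadOnionPerfAnalysisSketch {
  public static void main(String[] args) {
    DescriptorReader reader = DescriptorSourceFactory.createDescriptorReader();
    // The reader decompresses .xz contents and passes uncompressed bytes to the parser.
    for (Descriptor descriptor
        : reader.readDescriptors(new File("2020-04-27.onionperf.analysis.json.xz"))) {
      if (descriptor instanceof TorperfResult) {
        TorperfResult result = (TorperfResult) descriptor;
        System.out.println(result.getSource() + " " + result.getPath());
      }
    }
  }
}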

@@ -23,7 +23,16 @@ public class TorperfResultImpl extends DescriptorImpl
private static final long serialVersionUID = 8961567618137500044L;
protected static List<Descriptor> parseTorperfResults(
/**
* Parse the given descriptor to one or more {@link TorperfResult} instances.
*
* @param rawDescriptorBytes Bytes to parse
* @param descriptorFile Descriptor file containing the given bytes
* @return Parsed {@link TorperfResult} instances
* @throws DescriptorParseException Thrown if any of the lines cannot be
* parsed.
*/
public static List<Descriptor> parseTorperfResults(
byte[] rawDescriptorBytes, File descriptorFile)
throws DescriptorParseException {
if (rawDescriptorBytes.length == 0) {

@@ -0,0 +1,326 @@
/* Copyright 2020 The Tor Project
* See LICENSE for licensing information */
package org.torproject.descriptor.onionperf;
import org.torproject.descriptor.Descriptor;
import org.torproject.descriptor.DescriptorParseException;
import org.torproject.descriptor.impl.TorperfResultImpl;
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
* Converter that takes an OnionPerf analysis document as input and provides one
* or more {@link org.torproject.descriptor.TorperfResult} instances as output.
*
* <p>This conversion matches {@code tgen} transfers and {@code tor} streams by
* stream source port and transfer/stream end timestamps. This differs from the
* approach taken in OnionPerf's analyze mode, which matches by stream port
* only. As a result, converted Torperf results might contain different path or
* build time information than Torperf results written by OnionPerf.</p>
*/
public class OnionPerfAnalysisConverter {
/**
* Uncompressed OnionPerf analysis file bytes.
*/
private final byte[] rawDescriptorBytes;
/**
* OnionPerf analysis file.
*/
private final File descriptorFile;
/**
* Converted Torperf results.
*/
private List<Descriptor> convertedTorperfResults;
/**
* Construct a new instance from the given bytes and file reference.
*
* @param rawDescriptorBytes Uncompressed document bytes.
* @param descriptorFile Document file reference.
*/
public OnionPerfAnalysisConverter(byte[] rawDescriptorBytes,
File descriptorFile) {
this.rawDescriptorBytes = rawDescriptorBytes;
this.descriptorFile = descriptorFile;
}
/**
* Parse the OnionPerf analysis JSON document, do some basic verification, and
* convert its contents to {@link org.torproject.descriptor.TorperfResult}
* descriptors.
*
* @return Converted transfers.
* @throws DescriptorParseException Thrown if something goes wrong while
* parsing, verifying, or converting the OnionPerf analysis file to
* Torperf results.
*/
public List<Descriptor> asTorperfResults() throws DescriptorParseException {
ParsedOnionPerfAnalysis parsedOnionPerfAnalysis;
try {
parsedOnionPerfAnalysis = ParsedOnionPerfAnalysis.fromBytes(
this.rawDescriptorBytes);
} catch (IOException ioException) {
throw new DescriptorParseException("Ran into an I/O error while "
+ "attempting to parse an OnionPerf analysis document.",
ioException);
}
this.verifyDocumentTypeAndVersion(parsedOnionPerfAnalysis);
StringBuilder formattedTorperfResults
= this.formatTorperfResults(parsedOnionPerfAnalysis);
this.parseFormattedTorperfResults(formattedTorperfResults);
return this.convertedTorperfResults;
}
/**
* Verify document type and version and throw an exception when either of the
* two indicates that we cannot process the document.
*
* @param parsedOnionPerfAnalysis Parsed OnionPerf analysis document.
* @throws DescriptorParseException Thrown if either type or version indicate
* that we cannot process the document.
*/
private void verifyDocumentTypeAndVersion(
ParsedOnionPerfAnalysis parsedOnionPerfAnalysis)
throws DescriptorParseException {
if (!"onionperf".equals(parsedOnionPerfAnalysis.type)) {
throw new DescriptorParseException("Parsed OnionPerf analysis file does "
+ "not contain type information.");
}
if (null == parsedOnionPerfAnalysis.version) {
throw new DescriptorParseException("Parsed OnionPerf analysis file does "
+ "not contain version information.");
} else if ((parsedOnionPerfAnalysis.version instanceof Double
&& (double) parsedOnionPerfAnalysis.version > 1.999)
|| (parsedOnionPerfAnalysis.version instanceof String
&& !((String) parsedOnionPerfAnalysis.version).startsWith("1."))) {
throw new DescriptorParseException("Parsed OnionPerf analysis file "
+ "contains unsupported version " + parsedOnionPerfAnalysis.version
+ ".");
}
}
/**
* Format the parsed OnionPerf analysis file as one or more Torperf result
* strings.
*
* @param parsedOnionPerfAnalysis Parsed OnionPerf analysis document.
* @return Formatted Torperf result strings.
*/
private StringBuilder formatTorperfResults(
ParsedOnionPerfAnalysis parsedOnionPerfAnalysis) {
StringBuilder formattedTorperfResults = new StringBuilder();
Map<String, String> errorCodes = new HashMap<>();
errorCodes.put("AUTH", "TGEN/AUTH");
errorCodes.put("READ", "TGEN/READ");
errorCodes.put("STALLOUT", "TGEN/STALLOUT");
errorCodes.put("TIMEOUT", "TGEN/TIMEOUT");
errorCodes.put("PROXY", "TOR");
errorCodes.put("PROXY_CANT_ATTACH", "TOR/CANT_ATTACH");
errorCodes.put("PROXY_DESTROY", "TOR/DESTROY");
errorCodes.put("PROXY_END_TIMEOUT", "TOR/END/TIMEOUT");
errorCodes.put("PROXY_END_CONNECTREFUSED", "TOR/END/CONNECTREFUSED");
errorCodes.put("PROXY_RESOLVEFAILED", "TOR/RESOLVEFAILED");
errorCodes.put("PROXY_TIMEOUT", "TOR/TIMEOUT");
for (Map.Entry<String, ParsedOnionPerfAnalysis.MeasurementData> data
: parsedOnionPerfAnalysis.data.entrySet()) {
String nickname = data.getKey();
ParsedOnionPerfAnalysis.MeasurementData measurements = data.getValue();
if (null == measurements.measurementIp || null == measurements.tgen
|| null == measurements.tgen.transfers) {
continue;
}
String measurementIp = measurements.measurementIp;
Map<String, List<ParsedOnionPerfAnalysis.Stream>> streamsBySourcePort
= new HashMap<>();
Map<String, ParsedOnionPerfAnalysis.Circuit> circuitsByCircuitId
= new HashMap<>();
if (null != measurements.tor) {
circuitsByCircuitId = measurements.tor.circuits;
if (null != measurements.tor.streams) {
for (ParsedOnionPerfAnalysis.Stream stream
: measurements.tor.streams.values()) {
if (null != stream.source && stream.source.contains(":")) {
String sourcePort = stream.source.split(":")[1];
streamsBySourcePort.putIfAbsent(sourcePort, new ArrayList<>());
streamsBySourcePort.get(sourcePort).add(stream);
}
}
}
}
for (ParsedOnionPerfAnalysis.Transfer transfer
: measurements.tgen.transfers.values()) {
if (null == transfer.endpointLocal) {
continue;
}
String[] endpointLocalParts = transfer.endpointLocal.split(":");
if (endpointLocalParts.length < 3) {
continue;
}
TorperfResultsBuilder torperfResultsBuilder
= new TorperfResultsBuilder();
torperfResultsBuilder.addString("SOURCE", nickname);
torperfResultsBuilder.addString("SOURCEADDRESS", measurementIp);
this.formatTransferParts(torperfResultsBuilder, transfer);
List<String> errorCodeParts = null;
if (transfer.isError) {
errorCodeParts = new ArrayList<>();
errorCodeParts.add(transfer.errorCode);
}
String sourcePort = endpointLocalParts[2];
if (streamsBySourcePort.containsKey(sourcePort)) {
for (ParsedOnionPerfAnalysis.Stream stream
: streamsBySourcePort.get(sourcePort)) {
if (Math.abs(transfer.unixTsEnd - stream.unixTsEnd) < 150.0) {
if (null != errorCodeParts && null != stream.failureReasonLocal) {
errorCodeParts.add(stream.failureReasonLocal);
if (null != stream.failureReasonRemote) {
errorCodeParts.add(stream.failureReasonRemote);
}
}
if (null != stream.circuitId
&& circuitsByCircuitId.containsKey(stream.circuitId)) {
ParsedOnionPerfAnalysis.Circuit circuit
= circuitsByCircuitId.get(stream.circuitId);
this.formatStreamParts(torperfResultsBuilder, stream);
this.formatCircuitParts(torperfResultsBuilder, circuit);
}
}
}
}
if (null != errorCodeParts) {
String errorCode = String.join("_", errorCodeParts);
torperfResultsBuilder.addString("ERRORCODE",
errorCodes.getOrDefault(errorCode, errorCode));
}
formattedTorperfResults.append(torperfResultsBuilder.build());
}
}
return formattedTorperfResults;
}
/**
* Parse the previously formatted Torperf results.
*
* @param formattedTorperfResults Formatted Torperf result strings.
* @throws DescriptorParseException Thrown when an error occurs while parsing
* a previously formatted {@link org.torproject.descriptor.TorperfResult}
* string.
*/
private void parseFormattedTorperfResults(
StringBuilder formattedTorperfResults) throws DescriptorParseException {
this.convertedTorperfResults = TorperfResultImpl.parseTorperfResults(
formattedTorperfResults.toString().getBytes(), this.descriptorFile);
}
/**
* Format relevant tgen transfer data as Torperf result key-value pairs.
*
* @param torperfResultsBuilder Torperf results builder to add key-value pairs
* to.
* @param transfer Transfer data obtained from the parsed OnionPerf analysis
* file.
*/
private void formatTransferParts(TorperfResultsBuilder torperfResultsBuilder,
ParsedOnionPerfAnalysis.Transfer transfer) {
torperfResultsBuilder.addString("ENDPOINTLOCAL", transfer.endpointLocal);
torperfResultsBuilder.addString("ENDPOINTPROXY", transfer.endpointProxy);
torperfResultsBuilder.addString("ENDPOINTREMOTE", transfer.endpointRemote);
torperfResultsBuilder.addString("HOSTNAMELOCAL", transfer.hostnameLocal);
torperfResultsBuilder.addString("HOSTNAMEREMOTE", transfer.hostnameRemote);
torperfResultsBuilder.addInteger("FILESIZE", transfer.filesizeBytes);
torperfResultsBuilder.addInteger("READBYTES", transfer.totalBytesRead);
torperfResultsBuilder.addInteger("WRITEBYTES", transfer.totalBytesWrite);
torperfResultsBuilder.addInteger("DIDTIMEOUT", 0);
for (String key : new String[] { "START", "SOCKET", "CONNECT", "NEGOTIATE",
"REQUEST", "RESPONSE", "DATAREQUEST", "DATARESPONSE", "DATACOMPLETE",
"LAUNCH", "DATAPERC10", "DATAPERC20", "DATAPERC30", "DATAPERC40",
"DATAPERC50", "DATAPERC60", "DATAPERC70", "DATAPERC80", "DATAPERC90",
"DATAPERC100" }) {
torperfResultsBuilder.addString(key, "0.0");
}
torperfResultsBuilder.addTimestamp("START", transfer.unixTsStart, 0.0);
if (null != transfer.unixTsStart && null != transfer.elapsedSeconds) {
torperfResultsBuilder.addTimestamp("SOCKET", transfer.unixTsStart,
transfer.elapsedSeconds.socketCreate);
torperfResultsBuilder.addTimestamp("CONNECT", transfer.unixTsStart,
transfer.elapsedSeconds.socketConnect);
torperfResultsBuilder.addTimestamp("NEGOTIATE", transfer.unixTsStart,
transfer.elapsedSeconds.proxyChoice);
torperfResultsBuilder.addTimestamp("REQUEST", transfer.unixTsStart,
transfer.elapsedSeconds.proxyRequest);
torperfResultsBuilder.addTimestamp("RESPONSE", transfer.unixTsStart,
transfer.elapsedSeconds.proxyResponse);
torperfResultsBuilder.addTimestamp("DATAREQUEST", transfer.unixTsStart,
transfer.elapsedSeconds.command);
torperfResultsBuilder.addTimestamp("DATARESPONSE", transfer.unixTsStart,
transfer.elapsedSeconds.response);
if (null != transfer.elapsedSeconds.payloadProgress) {
for (Map.Entry<String, Double> payloadProgressEntry
: transfer.elapsedSeconds.payloadProgress.entrySet()) {
String key = String.format("DATAPERC%.0f",
Double.parseDouble(payloadProgressEntry.getKey()) * 100.0);
Double elapsedSeconds = payloadProgressEntry.getValue();
torperfResultsBuilder.addTimestamp(key, transfer.unixTsStart,
elapsedSeconds);
}
}
torperfResultsBuilder.addTimestamp("DATACOMPLETE", transfer.unixTsStart,
transfer.elapsedSeconds.lastByte);
if (transfer.isError) {
torperfResultsBuilder.addInteger("DIDTIMEOUT", 1);
}
}
}
/**
* Format relevant stream data as Torperf result key-value pairs.
*
* @param torperfResultsBuilder Torperf results builder to add key-value pairs
* to.
* @param stream Stream data obtained from the parsed OnionPerf analysis file.
*/
private void formatStreamParts(TorperfResultsBuilder torperfResultsBuilder,
ParsedOnionPerfAnalysis.Stream stream) {
torperfResultsBuilder.addTimestamp("USED_AT", stream.unixTsEnd, 0.0);
torperfResultsBuilder.addInteger("USED_BY", stream.streamId);
}
/**
* Format relevant circuit data as Torperf result key-value pairs.
*
* @param torperfResultsBuilder Torperf results builder to add key-value pairs
* to.
* @param circuit Circuit data obtained from the parsed OnionPerf analysis
* file.
*/
private void formatCircuitParts(TorperfResultsBuilder torperfResultsBuilder,
ParsedOnionPerfAnalysis.Circuit circuit) {
torperfResultsBuilder.addTimestamp("LAUNCH", circuit.unixTsStart, 0.0);
if (null != circuit.path) {
List<String> path = new ArrayList<>();
List<String> buildTimes = new ArrayList<>();
for (Object[] pathElement : circuit.path) {
String fingerprintAndNickname = (String) pathElement[0];
String fingerprint = fingerprintAndNickname.split("~")[0];
path.add(fingerprint);
buildTimes.add(String.format("%.2f", (Double) pathElement[1]));
}
torperfResultsBuilder.addString("PATH", String.join(",", path));
torperfResultsBuilder.addString("BUILDTIMES",
String.join(",", buildTimes));
torperfResultsBuilder.addInteger("TIMEOUT", circuit.buildTimeout);
torperfResultsBuilder.addDouble("QUANTILE", circuit.buildQuantile);
torperfResultsBuilder.addInteger("CIRC_ID", circuit.circuitId);
}
}
}
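As a reading aid, the matching rule described in the class comment can be distilled to a single predicate: a tgen transfer and a tor stream belong to the same measurement if the stream's source port equals the transfer's local SOCKS source port and their end timestamps lie within 150 seconds of each other; the circuit is then looked up by the stream's circuit identifier. The following sketch is a hypothetical simplification, not part of the library:

public class TransferStreamMatchingSketch {

  /** Restates the match condition used in formatTorperfResults above. */
  static boolean matches(String transferSourcePort, double transferUnixTsEnd,
      String streamSourcePort, double streamUnixTsEnd) {
    return transferSourcePort.equals(streamSourcePort)
        && Math.abs(transferUnixTsEnd - streamUnixTsEnd) < 150.0;
  }

  public static void main(String[] args) {
    // Same source port, end timestamps 1.2 seconds apart: considered a match.
    System.out.println(matches("40878", 1587991286.6, "40878", 1587991287.8));
  }
}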

@@ -0,0 +1,329 @@
/* Copyright 2020 The Tor Project
* See LICENSE for licensing information */
package org.torproject.descriptor.onionperf;
import com.fasterxml.jackson.annotation.JsonAutoDetect;
import com.fasterxml.jackson.annotation.PropertyAccessor;
import com.fasterxml.jackson.databind.DeserializationFeature;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.PropertyNamingStrategy;
import java.io.IOException;
import java.util.Map;
/**
* Parsed OnionPerf analysis document with all relevant fields for
* {@link OnionPerfAnalysisConverter} to convert contained measurements to
* {@link org.torproject.descriptor.TorperfResult} instances.
*/
public class ParsedOnionPerfAnalysis {
/**
* Object mapper for deserializing OnionPerf analysis documents to instances
* of this class.
*/
private static final ObjectMapper objectMapper = new ObjectMapper()
.setPropertyNamingStrategy(PropertyNamingStrategy.SNAKE_CASE)
.setVisibility(PropertyAccessor.ALL, JsonAutoDetect.Visibility.NONE)
.setVisibility(PropertyAccessor.FIELD, JsonAutoDetect.Visibility.ANY)
.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
/**
* Deserialize an OnionPerf analysis document from the given uncompressed
* bytes.
*
* @param bytes Uncompressed contents of the OnionPerf analysis to
* deserialize.
* @return Parsed OnionPerf analysis document.
* @throws IOException Thrown if something goes wrong while deserializing the
* given JSON document, but before doing any verification or
* postprocessing.
*/
static ParsedOnionPerfAnalysis fromBytes(byte[] bytes) throws IOException {
return objectMapper.readValue(bytes, ParsedOnionPerfAnalysis.class);
}
/**
* OnionPerf measurement data by source nickname.
*/
Map<String, MeasurementData> data;
/**
* Descriptor type, which should always be {@code "onionperf"} for OnionPerf
* analysis documents.
*/
String type;
/**
* Document version, which is either a {@link Double} in version 1.0 or a
* {@link String} in subsequent versions.
*/
Object version;
/**
* Measurement data obtained from client-side {@code tgen} and {@code tor}
* controller event logs.
*/
static class MeasurementData {
/**
* Public IP address of the OnionPerf host obtained by connecting to
* well-known servers and finding the IP address in the result, which may be
* {@code "unknown"} if OnionPerf was not able to find this information.
*/
String measurementIp;
/**
* Measurement data obtained from client-side {@code tgen} logs.
*/
TgenData tgen;
/**
* Measurement data obtained from client-side {@code tor} controller event
* logs.
*/
TorData tor;
}
/**
* Measurement data obtained from client-side {@code tgen} logs.
*/
static class TgenData {
/**
* Measurement data by transfer identifier.
*/
Map<String, Transfer> transfers;
}
/**
* Measurement data related to a single transfer obtained from client-side
* {@code tgen} logs.
*/
static class Transfer {
/**
* Elapsed seconds between starting a transfer at {@link #unixTsStart} and
* reaching a set of pre-defined states.
*/
ElapsedSeconds elapsedSeconds;
/**
* Hostname, IP address, and port that the {@code tgen} client used to
* connect to the local {@code tor} SOCKS port, formatted as
* {@code "hostname:ip:port"}, which may be {@code "NULL:0.0.0.0:0"} if
* {@code tgen} was not able to find this information.
*/
String endpointLocal;
/**
* Hostname, IP address, and port that the {@code tgen} client used to
* connect to the SOCKS proxy server that {@code tor} runs, formatted as
* {@code "hostname:ip:port"}, which may be {@code "NULL:0.0.0.0:0"} if
* {@code tgen} was not able to find this information.
*/
String endpointProxy;
/**
* Hostname, IP address, and port that the {@code tgen} client used to
* connect to the remote server, formatted as {@code "hostname:ip:port"},
* which may be {@code "NULL:0.0.0.0:0"} if {@code tgen} was not able to
* find this information.
*/
String endpointRemote;
/**
* Error code reported in the client {@code tgen} logs, which can be
* {@code "NONE"} if no error was encountered, {@code "PROXY"} in case of an
* error in {@code tor}, or something else for {@code tgen}-specific errors.
*/
String errorCode;
/**
* File size in bytes of the requested file in this transfer.
*/
Integer filesizeBytes;
/**
* Client machine hostname, which may be {@code "(NULL)"} if the
* {@code tgen} client was not able to find this information.
*/
String hostnameLocal;
/**
* Server machine hostname, which may be {@code "(NULL)"} if the
* {@code tgen} server was not able to find this information.
*/
String hostnameRemote;
/**
* Whether or not an error was encountered in this transfer.
*/
Boolean isError;
/**
* Total number of bytes read in this transfer.
*/
Integer totalBytesRead;
/**
* Total number of bytes written in this transfer.
*/
Integer totalBytesWrite;
/**
* Unix timestamp when this transfer started.
*/
Double unixTsStart;
/**
* Unix timestamp when this transfer ended.
*/
Double unixTsEnd;
}
/**
* Elapsed seconds between starting a transfer and reaching a set of
* pre-defined states.
*/
static class ElapsedSeconds {
/**
* Time until the HTTP request was written.
*/
Double command;
/**
* Time until the payload was complete.
*/
Double lastByte;
/**
* Time until the given fraction of expected bytes were read.
*/
Map<String, Double> payloadProgress;
/**
* Time until SOCKS 5 authentication methods have been negotiated.
*/
Double proxyChoice;
/**
* Time until the SOCKS request was sent.
*/
Double proxyRequest;
/**
* Time until the SOCKS response was received.
*/
Double proxyResponse;
/**
* Time until the first response was received.
*/
Double response;
/**
* Time until the socket was connected.
*/
Double socketConnect;
/**
* Time until the socket was created.
*/
Double socketCreate;
}
/**
* Measurement data obtained from client-side {@code tor} controller event
* logs.
*/
static class TorData {
/**
* Circuits by identifier.
*/
Map<String, Circuit> circuits;
/**
* Streams by identifier.
*/
Map<String, Stream> streams;
}
/**
* Measurement data related to a single circuit obtained from client-side
* {@code tor} controller event logs.
*/
static class Circuit {
/**
* Circuit build time quantile that the {@code tor} client uses to determine
* its circuit-build timeout.
*/
Double buildQuantile;
/**
* Circuit build timeout in milliseconds that the {@code tor} client used
* when building this circuit.
*/
Integer buildTimeout;
/**
* Circuit identifier.
*/
Integer circuitId;
/**
* Path information as a two-dimensional array containing a mixed-type
* {@link Object[]} per hop, with {@code "$fingerprint~nickname"} as the
* first element and the elapsed seconds between launching the circuit and
* extending it to this hop as the second element.
*/
Object[][] path;
/**
* Unix timestamp at the start of this circuit's lifetime.
*/
Double unixTsStart;
}
/**
* Measurement data related to a single stream obtained from client-side
* {@code tor} controller event logs.
*/
static class Stream {
/**
* Circuit identifier of the circuit that this stream was attached to.
*/
String circuitId;
/**
* Local reason why this stream failed.
*/
String failureReasonLocal;
/**
* Remote reason why this stream failed.
*/
String failureReasonRemote;
/**
* Source address and port that requested the connection.
*/
String source;
/**
* Stream identifier.
*/
Integer streamId;
/**
* Unix timestamp at the end of this stream's lifetime.
*/
Double unixTsEnd;
}
}
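The object mapper configuration above is what maps the snake_case keys of an OnionPerf analysis document onto the camelCase fields of these package-private classes. A minimal sketch of the same configuration applied to a stand-alone class, using a trimmed-down, hypothetical JSON fragment rather than a complete analysis document:

import com.fasterxml.jackson.annotation.JsonAutoDetect;
import com.fasterxml.jackson.annotation.PropertyAccessor;
import com.fasterxml.jackson.databind.DeserializationFeature;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.PropertyNamingStrategy;

public class SnakeCaseMappingSketch {

  /** Stand-in for a parsed transfer; field names mirror the pattern above. */
  static class MiniTransfer {
    Double unixTsStart;
    Integer filesizeBytes;
    Boolean isError;
  }

  public static void main(String[] args) throws Exception {
    ObjectMapper objectMapper = new ObjectMapper()
        .setPropertyNamingStrategy(PropertyNamingStrategy.SNAKE_CASE)
        .setVisibility(PropertyAccessor.ALL, JsonAutoDetect.Visibility.NONE)
        .setVisibility(PropertyAccessor.FIELD, JsonAutoDetect.Visibility.ANY)
        .configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
    String json = "{\"unix_ts_start\":1587991280.37,\"filesize_bytes\":1048576,"
        + "\"is_error\":false,\"unknown_key\":42}";
    MiniTransfer transfer = objectMapper.readValue(json, MiniTransfer.class);
    // Snake_case keys land in camelCase fields; unknown keys are ignored.
    System.out.println(transfer.unixTsStart + " " + transfer.filesizeBytes
        + " " + transfer.isError);
  }
}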

@@ -0,0 +1,101 @@
/* Copyright 2020 The Tor Project
* See LICENSE for licensing information */
package org.torproject.descriptor.onionperf;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.SortedMap;
import java.util.TreeMap;
/**
* Builder that accepts key-value pairs and produces a single line in the
* Torperf results format.
*/
public class TorperfResultsBuilder {
/**
* Key-value pairs to be formatted as Torperf results line.
*/
private final SortedMap<String, String> keyValuePairs = new TreeMap<>();
/**
* Add a string value, unless it is {@code null}.
*
* @param key Key.
* @param stringValue String value.
*/
void addString(String key, String stringValue) {
if (null != stringValue) {
this.keyValuePairs.put(key, stringValue);
}
}
/**
* Add an int value, unless it is {@code null}.
*
* @param key Key.
* @param integerValue Int value.
*/
void addInteger(String key, Integer integerValue) {
if (null != integerValue) {
keyValuePairs.put(key, String.valueOf(integerValue));
}
}
/**
* Add a double value, unless it is {@code null}.
*
* @param key Key.
* @param doubleValue Double value.
*/
void addDouble(String key, Double doubleValue) {
if (null != doubleValue) {
keyValuePairs.put(key, String.valueOf(doubleValue));
}
}
/**
* Add a timestamp value as the sum of two double values, formatted as seconds
* since the epoch with two decimal places, unless the first summand is
* {@code null}.
*
* @param key Key.
* @param unixTsStart First summand representing seconds since the epoch.
* @param elapsedSeconds Second summand representing seconds elapsed since the
* first summand.
*/
void addTimestamp(String key, Double unixTsStart, Double elapsedSeconds) {
if (null != unixTsStart) {
if (null != elapsedSeconds) {
keyValuePairs.put(key, String.format("%.2f",
unixTsStart + elapsedSeconds));
} else {
keyValuePairs.put(key, String.format("%.2f", unixTsStart));
}
}
}
/**
* Build the Torperf results line by putting together all key-value pairs as
* {@code "key=value"}, separated by spaces, prefixed by an annotation line
* {@code "@type torperf 1.1"}.
*
* @return Torperf results line in the same format that OnionPerf itself
* writes.
*/
String build() {
StringBuilder result = new StringBuilder();
result.append("@type torperf 1.1\r\n");
List<String> torperfResultsParts = new ArrayList<>();
for (Map.Entry<String, String> keyValuePairsEntry
: this.keyValuePairs.entrySet()) {
torperfResultsParts.add(String.format("%s=%s",
keyValuePairsEntry.getKey(), keyValuePairsEntry.getValue()));
}
result.append(String.join(" ", torperfResultsParts)).append("\r\n");
return result.toString();
}
}
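A minimal usage sketch for the builder, mirroring how the converter assembles a result line; because the add methods are package-private, such code has to live in org.torproject.descriptor.onionperf, and the key-value pairs shown are illustrative:

package org.torproject.descriptor.onionperf;

public class TorperfResultsBuilderSketch {
  public static void main(String[] args) {
    TorperfResultsBuilder builder = new TorperfResultsBuilder();
    builder.addString("SOURCE", "op-nl");
    builder.addInteger("FILESIZE", 51200);
    builder.addTimestamp("START", 1587991280.37, 0.0);
    builder.addTimestamp("DATACOMPLETE", 1587991280.37, 6.25);
    // Prints the annotation line followed by the alphabetically sorted pairs:
    // @type torperf 1.1
    // DATACOMPLETE=1587991286.62 FILESIZE=51200 SOURCE=op-nl START=1587991280.37
    System.out.print(builder.build());
  }
}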

@@ -0,0 +1,108 @@
/* Copyright 2020 The Tor Project
* See LICENSE for licensing information */
package org.torproject.descriptor.onionperf;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import org.torproject.descriptor.Descriptor;
import org.torproject.descriptor.DescriptorParseException;
import org.torproject.descriptor.TorperfResult;
import org.apache.commons.compress.compressors.xz.XZCompressorInputStream;
import org.apache.commons.compress.utils.IOUtils;
import org.junit.Test;
import java.io.IOException;
import java.io.InputStream;
import java.net.URL;
public class OnionPerfAnalysisConverterTest {
private final String torperfResultTransfer1m1
= "BUILDTIMES=0.15,0.22,0.34 CIRC_ID=39 CONNECT=1587991280.37 "
+ "DATACOMPLETE=1587991286.62 DATAPERC0=1587991283.81 "
+ "DATAPERC10=1587991284.15 DATAPERC100=1587991286.62 "
+ "DATAPERC20=1587991284.38 DATAPERC30=1587991284.66 "
+ "DATAPERC40=1587991284.93 DATAPERC50=1587991285.14 "
+ "DATAPERC60=1587991285.33 DATAPERC70=1587991285.67 "
+ "DATAPERC80=1587991285.85 DATAPERC90=1587991286.14 "
+ "DATAREQUEST=1587991283.36 DATARESPONSE=1587991283.81 DIDTIMEOUT=0 "
+ "ENDPOINTLOCAL=localhost:127.0.0.1:40878 "
+ "ENDPOINTPROXY=localhost:127.0.0.1:35900 "
+ "ENDPOINTREMOTE=m3eahz7co6lzi6jn.onion:0.0.0.0:443 FILESIZE=1048576 "
+ "HOSTNAMELOCAL=op-nl2 HOSTNAMEREMOTE=op-nl2 LAUNCH=1587991281.38 "
+ "NEGOTIATE=1587991280.37 "
+ "PATH=$970F0966DAA7EBDEE44E3772045527A6854E997B,"
+ "$8101421BEFCCF4C271D5483C5AABCAAD245BBB9D,"
+ "$1A7A2516A961F2838F7F94786A8811BE82F9CFFE READBYTES=1048643 "
+ "REQUEST=1587991280.38 RESPONSE=1587991280.37 SOCKET=1587991280.37 "
+ "SOURCE=op-nl2 SOURCEADDRESS=unknown START=1587991280.37 "
+ "USED_AT=1587991286.62 USED_BY=71 WRITEBYTES=53";
private final String torperfResultTransfer1m3
= "BUILDTIMES=22.81,23.57,24.45 CIRC_ID=72 CONNECT=1587991880.37 "
+ "DATACOMPLETE=1587991927.74 DATAPERC0=1587991910.74 "
+ "DATAPERC10=1587991913.71 DATAPERC100=1587991927.74 "
+ "DATAPERC20=1587991916.00 DATAPERC30=1587991917.92 "
+ "DATAPERC40=1587991919.69 DATAPERC50=1587991921.80 "
+ "DATAPERC60=1587991923.35 DATAPERC70=1587991924.91 "
+ "DATAPERC80=1587991925.77 DATAPERC90=1587991927.04 "
+ "DATAREQUEST=1587991909.80 DATARESPONSE=1587991910.74 DIDTIMEOUT=0 "
+ "ENDPOINTLOCAL=localhost:127.0.0.1:41016 "
+ "ENDPOINTPROXY=localhost:127.0.0.1:35900 "
+ "ENDPOINTREMOTE=3czoq6qyehjio6lcdo4tb4vk5uv2bm4gfk5iacnawza22do6klsj7wy"
+ "d.onion:0.0.0.0:443 FILESIZE=1048576 HOSTNAMELOCAL=op-nl2 "
+ "HOSTNAMEREMOTE=op-nl2 LAUNCH=1587991881.70 NEGOTIATE=1587991880.37 "
+ "PATH=$D5C6F62A5D1B3C711CA5E6F9D3772A432E96F6C2,"
+ "$94EC34B871936504BE70671B44760BC99242E1F3,"
+ "$E0F638ECCE918B5455CE29D2CD9ECC9DBD8F8B21 READBYTES=1048643 "
+ "REQUEST=1587991880.37 RESPONSE=1587991880.37 SOCKET=1587991880.37 "
+ "SOURCE=op-nl2 SOURCEADDRESS=unknown START=1587991880.37 "
+ "USED_AT=1587991927.74 USED_BY=112 WRITEBYTES=53";
private final String torperfResultTransfer50k2
= "BUILDTIMES=0.09,0.15,0.27 CIRC_ID=49 CONNECT=1587991580.81 "
+ "DATACOMPLETE=1587991580.80 DATAPERC10=0.0 DATAPERC100=0.0 "
+ "DATAPERC20=0.0 DATAPERC30=0.0 DATAPERC40=0.0 DATAPERC50=0.0 "
+ "DATAPERC60=0.0 DATAPERC70=0.0 DATAPERC80=0.0 DATAPERC90=0.0 "
+ "DATAREQUEST=1587991580.80 DATARESPONSE=1587991580.80 DIDTIMEOUT=1 "
+ "ENDPOINTLOCAL=localhost:127.0.0.1:40948 "
+ "ENDPOINTPROXY=localhost:127.0.0.1:35900 "
+ "ENDPOINTREMOTE=37.218.245.95:37.218.245.95:443 "
+ "ERRORCODE=PROXY_END_MISC FILESIZE=51200 HOSTNAMELOCAL=op-nl2 "
+ "HOSTNAMEREMOTE=(null) LAUNCH=1587991454.80 NEGOTIATE=1587991580.81 "
+ "PATH=$12CF6DB4DAE106206D6C6B09988E865C0509843B,"
+ "$1DC17C4A52A458B5C8B1E79157F8665696210E10,"
+ "$39F17EC1BD41E652D1B80484D268E3933476FF42 READBYTES=0 "
+ "REQUEST=1587991580.84 RESPONSE=1587991580.80 SOCKET=1587991580.81 "
+ "SOURCE=op-nl2 SOURCEADDRESS=unknown START=1587991580.80 "
+ "USED_AT=1587991580.84 USED_BY=93 WRITEBYTES=0";
@Test
public void testAsTorperfResults() throws IOException,
DescriptorParseException {
URL resource = getClass().getClassLoader().getResource(
"onionperf/onionperf.analysis.json.xz");
assertNotNull(resource);
InputStream compressedInputStream = resource.openStream();
assertNotNull(compressedInputStream);
InputStream uncompressedInputStream = new XZCompressorInputStream(
compressedInputStream);
byte[] rawDescriptorBytes = IOUtils.toByteArray(uncompressedInputStream);
OnionPerfAnalysisConverter onionPerfAnalysisConverter
= new OnionPerfAnalysisConverter(rawDescriptorBytes, null);
for (Descriptor descriptor
: onionPerfAnalysisConverter.asTorperfResults()) {
assertTrue(descriptor instanceof TorperfResult);
String formattedTorperfResult
= new String(descriptor.getRawDescriptorBytes()).trim();
assertNotNull(formattedTorperfResult);
assertTrue(formattedTorperfResult.equals(torperfResultTransfer1m1)
|| formattedTorperfResult.equals(torperfResultTransfer1m3)
|| formattedTorperfResult.equals(torperfResultTransfer50k2));
}
}
}