[tor-commits] [onionoo/master] Compile graph histories in a single place.
commit 314feacaf18dae7e1f29861b39a74fc3df2d1330
Author: Karsten Loesing <karsten.loesing@xxxxxxx>
Date: Wed Jan 17 20:29:11 2018 +0100
Compile graph histories in a single place.
---
.../org/torproject/onionoo/docs/UptimeHistory.java | 4 +
.../onionoo/writer/BandwidthDocumentWriter.java | 120 +-------
.../onionoo/writer/ClientsDocumentWriter.java | 130 +--------
.../onionoo/writer/GraphHistoryCompiler.java | 254 +++++++++++++++++
.../onionoo/writer/UptimeDocumentWriter.java | 312 +++++++--------------
.../onionoo/writer/WeightsDocumentWriter.java | 124 +-------
.../writer/BandwidthDocumentWriterTest.java | 4 +-
.../onionoo/writer/GraphHistoryCompilerTest.java | 203 ++++++++++++++
.../onionoo/writer/UptimeDocumentWriterTest.java | 14 +-
9 files changed, 606 insertions(+), 559 deletions(-)
diff --git a/src/main/java/org/torproject/onionoo/docs/UptimeHistory.java b/src/main/java/org/torproject/onionoo/docs/UptimeHistory.java
index 60e283f..f8cc116 100644
--- a/src/main/java/org/torproject/onionoo/docs/UptimeHistory.java
+++ b/src/main/java/org/torproject/onionoo/docs/UptimeHistory.java
@@ -32,6 +32,10 @@ public class UptimeHistory implements Comparable<UptimeHistory> {
return this.uptimeHours;
}
+ public long getEndMillis() {
+ return this.startMillis + DateTimeHelper.ONE_HOUR * this.uptimeHours;
+ }
+
private SortedSet<String> flags;
public SortedSet<String> getFlags() {
diff --git a/src/main/java/org/torproject/onionoo/writer/BandwidthDocumentWriter.java b/src/main/java/org/torproject/onionoo/writer/BandwidthDocumentWriter.java
index 2f27271..71595e2 100644
--- a/src/main/java/org/torproject/onionoo/writer/BandwidthDocumentWriter.java
+++ b/src/main/java/org/torproject/onionoo/writer/BandwidthDocumentWriter.java
@@ -15,12 +15,7 @@ import org.torproject.onionoo.docs.UpdateStatus;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
-import java.time.LocalDateTime;
import java.time.Period;
-import java.time.ZoneOffset;
-import java.util.ArrayList;
-import java.util.LinkedHashMap;
-import java.util.List;
import java.util.Map;
import java.util.SortedMap;
import java.util.SortedSet;
@@ -100,114 +95,17 @@ public class BandwidthDocumentWriter implements DocumentWriter {
private Map<String, GraphHistory> compileGraphType(long lastSeenMillis,
SortedMap<Long, long[]> history) {
- Map<String, GraphHistory> graphs = new LinkedHashMap<>();
+ GraphHistoryCompiler ghc = new GraphHistoryCompiler(
+ lastSeenMillis + DateTimeHelper.ONE_HOUR);
for (int i = 0; i < this.graphIntervals.length; i++) {
- String graphName = this.graphNames[i];
- Period graphInterval = this.graphIntervals[i];
- long dataPointInterval = this.dataPointIntervals[i];
- List<Long> dataPoints = new ArrayList<>();
- long graphEndMillis = ((lastSeenMillis + DateTimeHelper.ONE_HOUR)
- / dataPointInterval) * dataPointInterval;
- long graphStartMillis = LocalDateTime
- .ofEpochSecond(graphEndMillis / 1000L, 0, ZoneOffset.UTC)
- .minus(graphInterval)
- .toEpochSecond(ZoneOffset.UTC) * 1000L;
- long intervalStartMillis = graphStartMillis;
- long totalMillis = 0L;
- long totalBandwidth = 0L;
- for (long[] v : history.values()) {
- long endMillis = v[1];
- if (endMillis <= intervalStartMillis) {
- continue;
- } else if (endMillis > graphEndMillis) {
- break;
- }
- long startMillis = v[0];
- if (endMillis - startMillis > dataPointInterval) {
- /* This history interval is too long for this graph's data point
- * interval. Maybe the next graph will contain it, but not this
- * one. */
- continue;
- }
- while ((intervalStartMillis / dataPointInterval)
- != ((endMillis - 1L) / dataPointInterval)) {
- dataPoints.add(totalMillis * 5L < dataPointInterval
- ? -1L : (totalBandwidth * DateTimeHelper.ONE_SECOND)
- / totalMillis);
- totalBandwidth = 0L;
- totalMillis = 0L;
- intervalStartMillis += dataPointInterval;
- }
- long bandwidth = v[2];
- totalBandwidth += bandwidth;
- totalMillis += (endMillis - startMillis);
- }
- dataPoints.add(totalMillis * 5L < dataPointInterval
- ? -1L : (totalBandwidth * DateTimeHelper.ONE_SECOND)
- / totalMillis);
- long maxValue = 1L;
- int firstNonNullIndex = -1;
- int lastNonNullIndex = -1;
- for (int j = 0; j < dataPoints.size(); j++) {
- long dataPoint = dataPoints.get(j);
- if (dataPoint >= 0L) {
- if (firstNonNullIndex < 0) {
- firstNonNullIndex = j;
- }
- lastNonNullIndex = j;
- if (dataPoint > maxValue) {
- maxValue = dataPoint;
- }
- }
- }
- if (firstNonNullIndex < 0) {
- continue;
- }
- long firstDataPointMillis = graphStartMillis + firstNonNullIndex
- * dataPointInterval + dataPointInterval / 2L;
- if (i > 0 && !graphs.isEmpty() && firstDataPointMillis >= LocalDateTime
- .ofEpochSecond(graphEndMillis / 1000L, 0, ZoneOffset.UTC)
- .minus(graphIntervals[i - 1])
- .toEpochSecond(ZoneOffset.UTC) * 1000L) {
-
- /* Skip bandwidth history object, because it doesn't contain
- * anything new that wasn't already contained in the last
- * bandwidth history object(s). Unless we did not include any of
- * the previous bandwidth history objects for other reasons, in
- * which case we should include this one. */
- continue;
- }
- long lastDataPointMillis = firstDataPointMillis
- + (lastNonNullIndex - firstNonNullIndex) * dataPointInterval;
- double factor = ((double) maxValue) / 999.0;
- int count = lastNonNullIndex - firstNonNullIndex + 1;
- GraphHistory graphHistory = new GraphHistory();
- graphHistory.setFirst(firstDataPointMillis);
- graphHistory.setLast(lastDataPointMillis);
- graphHistory.setInterval((int) (dataPointInterval
- / DateTimeHelper.ONE_SECOND));
- graphHistory.setFactor(factor);
- graphHistory.setCount(count);
- int previousNonNullIndex = -2;
- boolean foundTwoAdjacentDataPoints = false;
- List<Integer> values = new ArrayList<>();
- for (int j = firstNonNullIndex; j <= lastNonNullIndex; j++) {
- long dataPoint = dataPoints.get(j);
- if (dataPoint >= 0L) {
- if (j - previousNonNullIndex == 1) {
- foundTwoAdjacentDataPoints = true;
- }
- previousNonNullIndex = j;
- }
- values.add(dataPoint < 0L ? null
- : (int) ((dataPoint * 999L) / maxValue));
- }
- graphHistory.setValues(values);
- if (foundTwoAdjacentDataPoints) {
- graphs.put(graphName, graphHistory);
- }
+ ghc.addGraphType(this.graphNames[i], this.graphIntervals[i],
+ this.dataPointIntervals[i]);
+ }
+ for (long[] v : history.values()) {
+ ghc.addHistoryEntry(v[0], v[1],
+ (double) (v[2] * DateTimeHelper.ONE_SECOND));
}
- return graphs;
+ return ghc.compileGraphHistories();
}
@Override
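
For context on the value passed above: the writer now hands the compiler v[2] * DateTimeHelper.ONE_SECOND for each bandwidth history interval, and compileGraphHistories() later divides the accumulated value by the accumulated milliseconds, so each data point still comes out in bytes per second. A small worked sketch with invented numbers (not from this commit):

/* Hypothetical 4-hour bandwidth interval: 7,200,000 bytes read. */
long readBytes = 7_200_000L;
long intervalMillis = 4L * 60L * 60L * 1000L;
double value = (double) (readBytes * 1000L);     // v[2] * ONE_SECOND, as passed in
double bytesPerSecond = value / intervalMillis;  // what the compiler computes: 500.0
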
diff --git a/src/main/java/org/torproject/onionoo/writer/ClientsDocumentWriter.java b/src/main/java/org/torproject/onionoo/writer/ClientsDocumentWriter.java
index 4eca33a..aba45cf 100644
--- a/src/main/java/org/torproject/onionoo/writer/ClientsDocumentWriter.java
+++ b/src/main/java/org/torproject/onionoo/writer/ClientsDocumentWriter.java
@@ -9,7 +9,6 @@ import org.torproject.onionoo.docs.ClientsStatus;
import org.torproject.onionoo.docs.DateTimeHelper;
import org.torproject.onionoo.docs.DocumentStore;
import org.torproject.onionoo.docs.DocumentStoreFactory;
-import org.torproject.onionoo.docs.GraphHistory;
import org.torproject.onionoo.docs.NodeStatus;
import org.torproject.onionoo.docs.UpdateStatus;
import org.torproject.onionoo.util.FormattingUtils;
@@ -17,13 +16,7 @@ import org.torproject.onionoo.util.FormattingUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
-import java.time.LocalDateTime;
import java.time.Period;
-import java.time.ZoneOffset;
-import java.util.ArrayList;
-import java.util.LinkedHashMap;
-import java.util.List;
-import java.util.Map;
import java.util.SortedSet;
/*
@@ -115,122 +108,19 @@ public class ClientsDocumentWriter implements DocumentWriter {
NodeStatus nodeStatus, SortedSet<ClientsHistory> history) {
ClientsDocument clientsDocument = new ClientsDocument();
clientsDocument.setFingerprint(hashedFingerprint);
- Map<String, GraphHistory> averageClients = new LinkedHashMap<>();
- for (int graphIntervalIndex = 0; graphIntervalIndex
- < this.graphIntervals.length; graphIntervalIndex++) {
- String graphName = this.graphNames[graphIntervalIndex];
- GraphHistory graphHistory = this.compileClientsHistory(
- graphIntervalIndex, history, nodeStatus.getLastSeenMillis());
- if (graphHistory != null) {
- averageClients.put(graphName, graphHistory);
- }
+ GraphHistoryCompiler ghc = new GraphHistoryCompiler(
+ nodeStatus.getLastSeenMillis() + DateTimeHelper.ONE_HOUR);
+ ghc.setThreshold(2L);
+ for (int i = 0; i < this.graphIntervals.length; i++) {
+ ghc.addGraphType(this.graphNames[i], this.graphIntervals[i],
+ this.dataPointIntervals[i]);
}
- clientsDocument.setAverageClients(averageClients);
- return clientsDocument;
- }
-
- private GraphHistory compileClientsHistory(
- int graphIntervalIndex, SortedSet<ClientsHistory> history,
- long lastSeenMillis) {
- Period graphInterval = this.graphIntervals[graphIntervalIndex];
- long dataPointInterval =
- this.dataPointIntervals[graphIntervalIndex];
- List<Double> dataPoints = new ArrayList<>();
- long graphEndMillis = ((lastSeenMillis + DateTimeHelper.ONE_HOUR)
- / dataPointInterval) * dataPointInterval;
- long graphStartMillis = LocalDateTime
- .ofEpochSecond(graphEndMillis / 1000L, 0, ZoneOffset.UTC)
- .minus(graphInterval)
- .toEpochSecond(ZoneOffset.UTC) * 1000L;
- long intervalStartMillis = graphStartMillis;
- long millis = 0L;
- double responses = 0.0;
for (ClientsHistory hist : history) {
- if (hist.getEndMillis() <= intervalStartMillis) {
- continue;
- } else if (hist.getEndMillis() > graphEndMillis) {
- break;
- }
- while ((intervalStartMillis / dataPointInterval)
- != ((hist.getEndMillis() - 1L) / dataPointInterval)) {
- dataPoints.add(millis * 2L < dataPointInterval
- ? -1.0 : responses * ((double) DateTimeHelper.ONE_DAY)
- / (((double) millis) * 10.0));
- responses = 0.0;
- millis = 0L;
- intervalStartMillis += dataPointInterval;
- }
- responses += hist.getTotalResponses();
- millis += (hist.getEndMillis() - hist.getStartMillis());
- }
- dataPoints.add(millis * 2L < dataPointInterval
- ? -1.0 : responses * ((double) DateTimeHelper.ONE_DAY)
- / (((double) millis) * 10.0));
- double maxValue = 0.0;
- int firstNonNullIndex = -1;
- int lastNonNullIndex = -1;
- for (int dataPointIndex = 0; dataPointIndex < dataPoints.size();
- dataPointIndex++) {
- double dataPoint = dataPoints.get(dataPointIndex);
- if (dataPoint >= 0.0) {
- if (firstNonNullIndex < 0) {
- firstNonNullIndex = dataPointIndex;
- }
- lastNonNullIndex = dataPointIndex;
- if (dataPoint > maxValue) {
- maxValue = dataPoint;
- }
- }
- }
- if (firstNonNullIndex < 0) {
- /* Not a single non-negative value in the data points. */
- return null;
- }
- long firstDataPointMillis = graphStartMillis + firstNonNullIndex
- * dataPointInterval + dataPointInterval / 2L;
- if (graphIntervalIndex > 0 && firstDataPointMillis >= LocalDateTime
- .ofEpochSecond(graphEndMillis / 1000L, 0, ZoneOffset.UTC)
- .minus(graphIntervals[graphIntervalIndex - 1])
- .toEpochSecond(ZoneOffset.UTC) * 1000L) {
- /* Skip clients history object, because it doesn't contain
- * anything new that wasn't already contained in the last
- * clients history object(s). */
- return null;
- }
- long lastDataPointMillis = firstDataPointMillis
- + (lastNonNullIndex - firstNonNullIndex) * dataPointInterval;
- double factor = ((double) maxValue) / 999.0;
- int count = lastNonNullIndex - firstNonNullIndex + 1;
- GraphHistory graphHistory = new GraphHistory();
- graphHistory.setFirst(firstDataPointMillis);
- graphHistory.setLast(lastDataPointMillis);
- graphHistory.setInterval((int) (dataPointInterval
- / DateTimeHelper.ONE_SECOND));
- graphHistory.setFactor(factor);
- graphHistory.setCount(count);
- int previousNonNullIndex = -2;
- boolean foundTwoAdjacentDataPoints = false;
- List<Integer> values = new ArrayList<>();
- for (int dataPointIndex = firstNonNullIndex; dataPointIndex
- <= lastNonNullIndex; dataPointIndex++) {
- double dataPoint = dataPoints.get(dataPointIndex);
- if (dataPoint >= 0.0) {
- if (dataPointIndex - previousNonNullIndex == 1) {
- foundTwoAdjacentDataPoints = true;
- }
- previousNonNullIndex = dataPointIndex;
- }
- values.add(dataPoint < 0.0 ? null :
- (int) ((dataPoint * 999.0) / maxValue));
- }
- graphHistory.setValues(values);
- if (foundTwoAdjacentDataPoints) {
- return graphHistory;
- } else {
- /* There are no two adjacent values in the data points that are
- * required to draw a line graph. */
- return null;
+ ghc.addHistoryEntry(hist.getStartMillis(), hist.getEndMillis(),
+ hist.getTotalResponses() * ((double) DateTimeHelper.ONE_DAY) / 10.0);
}
+ clientsDocument.setAverageClients(ghc.compileGraphHistories());
+ return clientsDocument;
}
@Override
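
The clients value follows the same pattern: the writer passes totalResponses * ONE_DAY / 10.0, and once the compiler divides by the covered milliseconds, each data point is directory responses per day divided by 10, i.e. the same average-clients estimate that compileClientsHistory() produced before. A tiny worked example with made-up numbers:

/* Hypothetical clients history entry covering exactly one day with 5,000
 * directory responses. */
double responses = 5000.0;
long oneDayMillis = 24L * 60L * 60L * 1000L;     // DateTimeHelper.ONE_DAY
double value = responses * oneDayMillis / 10.0;  // as passed to addHistoryEntry()
double averageClients = value / oneDayMillis;    // compiler's value/millis: 500.0
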
diff --git a/src/main/java/org/torproject/onionoo/writer/GraphHistoryCompiler.java b/src/main/java/org/torproject/onionoo/writer/GraphHistoryCompiler.java
new file mode 100644
index 0000000..853ab32
--- /dev/null
+++ b/src/main/java/org/torproject/onionoo/writer/GraphHistoryCompiler.java
@@ -0,0 +1,254 @@
+/* Copyright 2018 The Tor Project
+ * See LICENSE for licensing information */
+
+package org.torproject.onionoo.writer;
+
+import org.torproject.onionoo.docs.DateTimeHelper;
+import org.torproject.onionoo.docs.GraphHistory;
+
+import java.time.LocalDateTime;
+import java.time.Period;
+import java.time.ZoneOffset;
+import java.util.ArrayList;
+import java.util.LinkedHashMap;
+import java.util.List;
+import java.util.Map;
+
+/** Helper class to compile graph histories. */
+public class GraphHistoryCompiler {
+
+ private long graphsEndMillis;
+
+ /**
+ * Instantiates a new graph history compiler with the provided end time for
+ * all compiled graphs.
+ *
+ * @param graphsEndMillis End time for all compiled graphs.
+ */
+ GraphHistoryCompiler(long graphsEndMillis) {
+ this.graphsEndMillis = graphsEndMillis;
+ }
+
+ private boolean divisible = false;
+
+ /**
+ * Set whether history entries are divisible in the sense that they may be
+ * longer than one data point interval; this is the case for uptime intervals,
+ * where uptime is distributed equally over potentially many data point
+ * intervals, but not for bandwidth/weights/clients intervals, where
+ * observations are given for fixed-size reporting intervals. */
+ void setDivisible(boolean divisible) {
+ this.divisible = divisible;
+ }
+
+ private long threshold = 5;
+
+ /**
+ * Set the threshold (given as a reciprocal value) of available history for
+ * any given data point below which that data point will be counted as null
+ * (missing); the default is 5 for 1/5 = 20%.
+ */
+ void setThreshold(long threshold) {
+ this.threshold = threshold;
+ }
+
+ private List<String> graphNames = new ArrayList<>();
+
+ private List<Period> graphIntervals = new ArrayList<>();
+
+ private List<Long> dataPointIntervals = new ArrayList<>();
+
+ /**
+ * Add a graph type with the given graph name, graph interval, and data point
+ * interval.
+ *
+ * @param graphName Graph name, like "1_week".
+ * @param graphInterval Graph interval, like Period.ofWeeks(1).
+ * @param dataPointInterval Data point interval, like 1 hour in milliseconds.
+ */
+ void addGraphType(String graphName, Period graphInterval,
+ Long dataPointInterval) {
+ this.graphNames.add(graphName);
+ this.graphIntervals.add(graphInterval);
+ this.dataPointIntervals.add(dataPointInterval);
+ }
+
+ private Map<long[], Double> history = new LinkedHashMap<>();
+
+ /**
+ * Add a history entry with given start and end time and value.
+ *
+ * @param startMillis Start time in milliseconds.
+ * @param endMillis End time in milliseconds.
+ * @param value History entry value.
+ */
+ void addHistoryEntry(long startMillis, long endMillis, double value) {
+ this.history.put(new long[] { startMillis, endMillis }, value);
+ }
+
+ /**
+ * Compile graph histories from the history entries provided earlier.
+ *
+ * @return Map with graph names as keys and GraphHistory instances as values.
+ */
+ Map<String, GraphHistory> compileGraphHistories() {
+ Map<String, GraphHistory> graphs = new LinkedHashMap<>();
+ for (int graphIntervalIndex = 0;
+ graphIntervalIndex < this.graphIntervals.size();
+ graphIntervalIndex++) {
+
+ /* Look up graph name, graph interval, and data point interval from the
+ * graph type details provided earlier. */
+ final String graphName = this.graphNames.get(graphIntervalIndex);
+ Period graphInterval = this.graphIntervals.get(graphIntervalIndex);
+ long dataPointInterval = this.dataPointIntervals.get(graphIntervalIndex);
+
+ /* Determine graph end time as the end time for all graphs, rounded down
+ * to the last full data point interval. */
+ long graphEndMillis = (this.graphsEndMillis / dataPointInterval)
+ * dataPointInterval;
+
+ /* Determine graph start time as graph end time minus graph interval,
+ * rounded down to the last full data point interval. */
+ long graphStartMillis = ((LocalDateTime
+ .ofEpochSecond(graphEndMillis / 1000L, 0, ZoneOffset.UTC)
+ .minus(graphInterval)
+ .toEpochSecond(ZoneOffset.UTC) * 1000L) / dataPointInterval)
+ * dataPointInterval;
+
+ /* Keep input for graph values in two arrays, one for values * millis,
+ * another one for millis. */
+ int dataPoints = (int) ((graphEndMillis - graphStartMillis)
+ / dataPointInterval);
+ double[] totalValues = new double[dataPoints];
+ long[] totalMillis = new long[dataPoints];
+
+ /* Iterate over all history entries and see which ones we need for this
+ * graph. */
+ for (Map.Entry<long[], Double> h : this.history.entrySet()) {
+ long startMillis = h.getKey()[0];
+ long endMillis = h.getKey()[1];
+ double value = h.getValue();
+
+ /* If a history entry ends before this graph starts or starts after
+ * this graph ends, skip it. */
+ if (endMillis <= graphStartMillis || startMillis >= graphEndMillis) {
+ continue;
+ }
+
+ /* If history entries are not divisible and this entry is longer than
+ * the data point interval, skip it. Maybe the next graph will contain
+ * it, but not this one. */
+ if (!this.divisible && endMillis - startMillis > dataPointInterval) {
+ continue;
+ }
+
+ /* Iterate over all data points that this history element falls into.
+ * Even if history entries are not divisible, we may have to split it
+ * over two data points, because reported statistics rarely align with
+ * our data point intervals. And if history entries are divisible, we
+ * may have to split them over many data points. */
+ for (long intervalStartMillis = startMillis;
+ intervalStartMillis < endMillis;
+ intervalStartMillis = ((intervalStartMillis + dataPointInterval)
+ / dataPointInterval) * dataPointInterval) {
+
+ /* Determine the data point that this (partial) history entry falls
+ * into. And if it's out of bounds, skip it. */
+ int dataPointIndex = (int) ((intervalStartMillis - graphStartMillis)
+ / dataPointInterval);
+ if (dataPointIndex < 0 || dataPointIndex >= dataPoints) {
+ continue;
+ }
+
+ /* Determine the interval end, which may be the end of the data point
+ * or the end of the history entry, whichever comes first. Then add
+ * values and millis to the data point. */
+ long intervalEndMillis = Math.min(endMillis, ((intervalStartMillis
+ + dataPointInterval) / dataPointInterval) * dataPointInterval);
+ long millis = intervalEndMillis - intervalStartMillis;
+ totalValues[dataPointIndex] += (value * (double) millis)
+ / (double) (endMillis - startMillis);
+ totalMillis[dataPointIndex] += millis;
+ }
+ }
+
+ /* Go through the previously compiled data points and extract some pieces
+ * that will be relevant for deciding whether to include this graph and
+ * for adding meta data to the GraphHistory object. */
+ double maxValue = 0.0;
+ int firstNonNullIndex = -1;
+ int lastNonNullIndex = -1;
+ boolean foundTwoAdjacentDataPoints = false;
+ for (int dataPointIndex = 0, previousNonNullIndex = -2;
+ dataPointIndex < dataPoints; dataPointIndex++) {
+
+ /* Only consider data points that contain values for at least the given
+ * threshold of time (20% by default). For those, record the first and
+ * last data point containing data, whether two adjacent data points
+ * containing data exist, and determine the maximum value. */
+ if (totalMillis[dataPointIndex] * this.threshold >= dataPointInterval) {
+ if (firstNonNullIndex < 0) {
+ firstNonNullIndex = dataPointIndex;
+ }
+ lastNonNullIndex = dataPointIndex;
+ if (dataPointIndex - previousNonNullIndex == 1) {
+ foundTwoAdjacentDataPoints = true;
+ }
+ previousNonNullIndex = dataPointIndex;
+ maxValue = Math.max(maxValue, totalValues[dataPointIndex]
+ / totalMillis[dataPointIndex]);
+ }
+ }
+
+ /* If there are not at least two adjacent data points containing data,
+ * skip the graph. */
+ if (!foundTwoAdjacentDataPoints) {
+ continue;
+ }
+
+ /* Calculate the timestamp of the first data point containing data. */
+ long firstDataPointMillis = graphStartMillis + firstNonNullIndex
+ * dataPointInterval + dataPointInterval / 2L;
+
+ /* If the graph doesn't contain anything new that wasn't already contained
+ * in previously compiled graphs, skip this graph. */
+ if (graphIntervalIndex > 0 && !graphs.isEmpty()
+ && firstDataPointMillis >= LocalDateTime.ofEpochSecond(
+ graphEndMillis / 1000L, 0, ZoneOffset.UTC)
+ .minus(this.graphIntervals.get(graphIntervalIndex - 1))
+ .toEpochSecond(ZoneOffset.UTC) * 1000L) {
+ continue;
+ }
+
+ /* Put together the list of values that will go into the graph. */
+ List<Integer> values = new ArrayList<>();
+ for (int dataPointIndex = firstNonNullIndex;
+ dataPointIndex <= lastNonNullIndex; dataPointIndex++) {
+ if (totalMillis[dataPointIndex] * this.threshold >= dataPointInterval) {
+ values.add((int) ((totalValues[dataPointIndex] * 999.0)
+ / (maxValue * totalMillis[dataPointIndex])));
+ } else {
+ values.add(null);
+ }
+ }
+
+ /* Put together a GraphHistory object and add it to the map under the
+ * given graph name. */
+ GraphHistory graphHistory = new GraphHistory();
+ graphHistory.setFirst(firstDataPointMillis);
+ graphHistory.setLast(firstDataPointMillis + (lastNonNullIndex
+ - firstNonNullIndex) * dataPointInterval);
+ graphHistory.setInterval((int) (dataPointInterval
+ / DateTimeHelper.ONE_SECOND));
+ graphHistory.setFactor(maxValue / 999.0);
+ graphHistory.setCount(lastNonNullIndex - firstNonNullIndex + 1);
+ graphHistory.setValues(values);
+ graphs.put(graphName, graphHistory);
+ }
+
+ /* We're done. Return the map of compiled graphs. */
+ return graphs;
+ }
+}
+
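
Taken together, a writer now only needs the constructor plus addGraphType(), addHistoryEntry(), and compileGraphHistories(). Below is a minimal usage sketch, not part of this commit: the class name and times are invented, and it has to live in the org.torproject.onionoo.writer package because the compiler and its methods are package-private.

package org.torproject.onionoo.writer;

import org.torproject.onionoo.docs.DateTimeHelper;
import org.torproject.onionoo.docs.GraphHistory;

import java.time.Period;
import java.util.Map;

public class GraphHistoryCompilerUsageSketch {
  public static void main(String[] args) {
    /* Compile a single "1_week" graph ending one hour after a made-up
     * last-seen time. */
    long lastSeenMillis = DateTimeHelper.parse("2018-01-01 00:00:00");
    GraphHistoryCompiler ghc = new GraphHistoryCompiler(
        lastSeenMillis + DateTimeHelper.ONE_HOUR);
    ghc.addGraphType("1_week", Period.ofWeeks(1), DateTimeHelper.ONE_HOUR);

    /* Each history entry carries a start time, an end time, and a value; the
     * compiler averages values over the milliseconds they cover. */
    ghc.addHistoryEntry(lastSeenMillis - 2L * DateTimeHelper.ONE_HOUR,
        lastSeenMillis - DateTimeHelper.ONE_HOUR, 1.0);
    ghc.addHistoryEntry(lastSeenMillis - DateTimeHelper.ONE_HOUR,
        lastSeenMillis, 1.0);

    /* Two adjacent hourly data points with data: the result should contain a
     * "1_week" graph; graphs that fail the compiler's checks are left out. */
    Map<String, GraphHistory> graphs = ghc.compileGraphHistories();
    System.out.println(graphs.keySet());
  }
}
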
diff --git a/src/main/java/org/torproject/onionoo/writer/UptimeDocumentWriter.java b/src/main/java/org/torproject/onionoo/writer/UptimeDocumentWriter.java
index d1e2003..12ba8fa 100644
--- a/src/main/java/org/torproject/onionoo/writer/UptimeDocumentWriter.java
+++ b/src/main/java/org/torproject/onionoo/writer/UptimeDocumentWriter.java
@@ -17,12 +17,8 @@ import org.torproject.onionoo.util.FormattingUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
-import java.time.LocalDateTime;
import java.time.Period;
-import java.time.ZoneOffset;
-import java.util.ArrayList;
-import java.util.LinkedHashMap;
-import java.util.List;
+import java.util.Iterator;
import java.util.Map;
import java.util.SortedMap;
import java.util.SortedSet;
@@ -110,18 +106,8 @@ public class UptimeDocumentWriter implements DocumentWriter {
SortedSet<UptimeHistory> knownStatuses, long lastSeenMillis) {
UptimeDocument uptimeDocument = new UptimeDocument();
uptimeDocument.setFingerprint(fingerprint);
- Map<String, GraphHistory> uptime = new LinkedHashMap<>();
- for (int graphIntervalIndex = 0; graphIntervalIndex
- < this.graphIntervals.length; graphIntervalIndex++) {
- String graphName = this.graphNames[graphIntervalIndex];
- GraphHistory graphHistory = this.compileUptimeHistory(
- graphIntervalIndex, relay, history, knownStatuses, lastSeenMillis,
- null);
- if (graphHistory != null) {
- uptime.put(graphName, graphHistory);
- }
- }
- uptimeDocument.setUptime(uptime);
+ uptimeDocument.setUptime(this.compileUptimeHistory(relay, history,
+ knownStatuses, lastSeenMillis, null));
SortedMap<String, Map<String, GraphHistory>> flags = new TreeMap<>();
SortedSet<String> allFlags = new TreeSet<>();
for (UptimeHistory hist : history) {
@@ -130,17 +116,8 @@ public class UptimeDocumentWriter implements DocumentWriter {
}
}
for (String flag : allFlags) {
- Map<String, GraphHistory> graphsForFlags = new LinkedHashMap<>();
- for (int graphIntervalIndex = 0; graphIntervalIndex
- < this.graphIntervals.length; graphIntervalIndex++) {
- String graphName = this.graphNames[graphIntervalIndex];
- GraphHistory graphHistory = this.compileUptimeHistory(
- graphIntervalIndex, relay, history, knownStatuses, lastSeenMillis,
- flag);
- if (graphHistory != null) {
- graphsForFlags.put(graphName, graphHistory);
- }
- }
+ Map<String, GraphHistory> graphsForFlags = this.compileUptimeHistory(
+ relay, history, knownStatuses, lastSeenMillis, flag);
if (!graphsForFlags.isEmpty()) {
flags.put(flag, graphsForFlags);
}
@@ -151,187 +128,116 @@ public class UptimeDocumentWriter implements DocumentWriter {
return uptimeDocument;
}
- private GraphHistory compileUptimeHistory(int graphIntervalIndex,
- boolean relay, SortedSet<UptimeHistory> history,
- SortedSet<UptimeHistory> knownStatuses, long lastSeenMillis,
- String flag) {
- Period graphInterval = this.graphIntervals[graphIntervalIndex];
- long dataPointInterval =
- this.dataPointIntervals[graphIntervalIndex];
- int dataPointIntervalHours = (int) (dataPointInterval
- / DateTimeHelper.ONE_HOUR);
- List<Integer> uptimeDataPoints = new ArrayList<>();
- long graphEndMillis = ((lastSeenMillis + DateTimeHelper.ONE_HOUR)
- / dataPointInterval) * dataPointInterval;
- long graphStartMillis = LocalDateTime
- .ofEpochSecond(graphEndMillis / 1000L, 0, ZoneOffset.UTC)
- .minus(graphInterval)
- .toEpochSecond(ZoneOffset.UTC) * 1000L;
- long intervalStartMillis = graphStartMillis;
- int uptimeHours = 0;
- long firstStatusStartMillis = -1L;
- for (UptimeHistory hist : history) {
- if (hist.isRelay() != relay
- || (flag != null && (hist.getFlags() == null
- || !hist.getFlags().contains(flag)))) {
- continue;
- }
- if (firstStatusStartMillis < 0L) {
- firstStatusStartMillis = hist.getStartMillis();
- }
- long histEndMillis = hist.getStartMillis() + DateTimeHelper.ONE_HOUR
- * hist.getUptimeHours();
- if (histEndMillis <= intervalStartMillis) {
- continue;
- } else if (histEndMillis > graphEndMillis) {
- histEndMillis = graphEndMillis;
- }
- while (hist.getStartMillis() >= intervalStartMillis
- + dataPointInterval) {
- if (firstStatusStartMillis < intervalStartMillis
- + dataPointInterval) {
- uptimeDataPoints.add(uptimeHours);
- } else {
- uptimeDataPoints.add(-1);
- }
- uptimeHours = 0;
- intervalStartMillis += dataPointInterval;
- }
- while (histEndMillis >= intervalStartMillis + dataPointInterval) {
- uptimeHours += (int) ((intervalStartMillis + dataPointInterval
- - Math.max(hist.getStartMillis(), intervalStartMillis))
- / DateTimeHelper.ONE_HOUR);
- uptimeDataPoints.add(uptimeHours);
- uptimeHours = 0;
- intervalStartMillis += dataPointInterval;
- }
- uptimeHours += (int) ((histEndMillis - Math.max(
- hist.getStartMillis(), intervalStartMillis))
- / DateTimeHelper.ONE_HOUR);
- }
- uptimeDataPoints.add(uptimeHours);
- List<Integer> statusDataPoints = new ArrayList<>();
- intervalStartMillis = graphStartMillis;
- int statusHours = -1;
- for (UptimeHistory hist : knownStatuses) {
- if (hist.getStartMillis() >= graphEndMillis) {
- break;
- }
- if (hist.isRelay() != relay
- || (flag != null && (hist.getFlags() == null
- || !hist.getFlags().contains(flag)))) {
- continue;
- }
- long histEndMillis = hist.getStartMillis() + DateTimeHelper.ONE_HOUR
- * hist.getUptimeHours();
- if (histEndMillis <= intervalStartMillis) {
- continue;
- } else if (histEndMillis > graphEndMillis) {
- histEndMillis = graphEndMillis;
- }
- while (hist.getStartMillis() >= intervalStartMillis
- + dataPointInterval) {
- statusDataPoints.add(statusHours * 5 < dataPointIntervalHours
- ? -1 : statusHours);
- statusHours = -1;
- intervalStartMillis += dataPointInterval;
- }
- while (histEndMillis >= intervalStartMillis + dataPointInterval) {
- if (statusHours < 0) {
- statusHours = 0;
- }
- statusHours += (int) ((intervalStartMillis + dataPointInterval
- - Math.max(Math.max(hist.getStartMillis(),
- firstStatusStartMillis), intervalStartMillis))
- / DateTimeHelper.ONE_HOUR);
- statusDataPoints.add(statusHours * 5 < dataPointIntervalHours
- ? -1 : statusHours);
- statusHours = -1;
- intervalStartMillis += dataPointInterval;
- }
- if (statusHours < 0) {
- statusHours = 0;
- }
- statusHours += (int) ((histEndMillis - Math.max(Math.max(
- hist.getStartMillis(), firstStatusStartMillis),
- intervalStartMillis)) / DateTimeHelper.ONE_HOUR);
- }
- if (statusHours > 0) {
- statusDataPoints.add(statusHours * 5 < dataPointIntervalHours
- ? -1 : statusHours);
- }
- List<Double> dataPoints = new ArrayList<>();
- for (int dataPointIndex = 0; dataPointIndex < statusDataPoints.size();
- dataPointIndex++) {
- if (dataPointIndex >= uptimeDataPoints.size()) {
- dataPoints.add(0.0);
- } else if (uptimeDataPoints.get(dataPointIndex) >= 0
- && statusDataPoints.get(dataPointIndex) > 0) {
- dataPoints.add(((double) uptimeDataPoints.get(dataPointIndex))
- / ((double) statusDataPoints.get(dataPointIndex)));
- } else {
- dataPoints.add(-1.0);
- }
- }
- int firstNonNullIndex = -1;
- int lastNonNullIndex = -1;
- for (int dataPointIndex = 0; dataPointIndex < dataPoints.size();
- dataPointIndex++) {
- double dataPoint = dataPoints.get(dataPointIndex);
- if (dataPoint >= 0.0) {
- if (firstNonNullIndex < 0) {
- firstNonNullIndex = dataPointIndex;
- }
- lastNonNullIndex = dataPointIndex;
- }
- }
- if (firstNonNullIndex < 0) {
- /* Not a single non-negative value in the data points. */
+ private Map<String, GraphHistory> compileUptimeHistory(boolean relay,
+ SortedSet<UptimeHistory> history, SortedSet<UptimeHistory> knownStatuses,
+ long lastSeenMillis, String flag) {
+
+ /* Extracting history entries for compiling GraphHistory objects is a bit
+ * harder than for the other document types. The reason is that we have to
+ * combine (A) uptime history of all relays/bridges and (B) uptime history
+ * of the relay/bridge that we're writing the document for. We're going to
+ * refer to A and B below, to simplify descriptions a bit. */
+
+ /* If there are either no A entries or no B entries, we can't compile
+ * graphs. */
+ if (history.isEmpty() || knownStatuses.isEmpty()) {
return null;
}
- long firstDataPointMillis = graphStartMillis + firstNonNullIndex
- * dataPointInterval + dataPointInterval / 2L;
- if (graphIntervalIndex > 0 && firstDataPointMillis >= LocalDateTime
- .ofEpochSecond(graphEndMillis / 1000L, 0, ZoneOffset.UTC)
- .minus(graphIntervals[graphIntervalIndex - 1])
- .toEpochSecond(ZoneOffset.UTC) * 1000L) {
- /* Skip uptime history object, because it doesn't contain
- * anything new that wasn't already contained in the last
- * uptime history object(s). */
+
+ /* Initialize the graph history compiler, and tell it that history entries
+ * are divisible. This is different from the other history writers. */
+ GraphHistoryCompiler ghc = new GraphHistoryCompiler(
+ lastSeenMillis + DateTimeHelper.ONE_HOUR);
+ for (int i = 0; i < this.graphIntervals.length; i++) {
+ ghc.addGraphType(this.graphNames[i], this.graphIntervals[i],
+ this.dataPointIntervals[i]);
+ }
+ ghc.setDivisible(true);
+
+ /* The general idea for extracting history entries and passing them to the
+ * graph history compiler is to iterate over A entries one by one and keep
+ * an Iterator for B entries to move forward as "time" proceeds. */
+ Iterator<UptimeHistory> historyIterator = history.iterator();
+ UptimeHistory hist;
+ do {
+ hist = historyIterator.hasNext() ? historyIterator.next() : null;
+ } while (null != hist && (hist.isRelay() != relay
+ || (null != flag && (null == hist.getFlags()
+ || !hist.getFlags().contains(flag)))));
+
+ /* If there is not at least one B entry, we can't compile graphs. */
+ if (null == hist) {
return null;
}
- long lastDataPointMillis = firstDataPointMillis
- + (lastNonNullIndex - firstNonNullIndex) * dataPointInterval;
- int count = lastNonNullIndex - firstNonNullIndex + 1;
- GraphHistory graphHistory = new GraphHistory();
- graphHistory.setFirst(firstDataPointMillis);
- graphHistory.setLast(lastDataPointMillis);
- graphHistory.setInterval((int) (dataPointInterval
- / DateTimeHelper.ONE_SECOND));
- graphHistory.setFactor(1.0 / 999.0);
- graphHistory.setCount(count);
- int previousNonNullIndex = -2;
- boolean foundTwoAdjacentDataPoints = false;
- List<Integer> values = new ArrayList<>();
- for (int dataPointIndex = firstNonNullIndex; dataPointIndex
- <= lastNonNullIndex; dataPointIndex++) {
- double dataPoint = dataPoints.get(dataPointIndex);
- if (dataPoint >= 0.0) {
- if (dataPointIndex - previousNonNullIndex == 1) {
- foundTwoAdjacentDataPoints = true;
- }
- previousNonNullIndex = dataPointIndex;
+
+ for (UptimeHistory statuses : knownStatuses) {
+
+ /* If this A entry contains uptime information that we're not interested
+ * in, skip it. */
+ if (statuses.isRelay() != relay
+ || (null != flag && (null == statuses.getFlags()
+ || !statuses.getFlags().contains(flag)))) {
+ continue;
}
- values.add(dataPoint < -0.5 ? null : ((int) (dataPoint * 999.0)));
- }
- graphHistory.setValues(values);
- if (foundTwoAdjacentDataPoints) {
- return graphHistory;
- } else {
- /* There are no two adjacent values in the data points that are
- * required to draw a line graph. */
- return null;
+
+ /* The "current" time is the time that we're currently considering as part
+ * of the A entry. It starts out at the A entry's start time, but it may
+ * advance as we consider multiple B entries. The loop ends when the
+ * "current" time has reached the end of the considered A entry. */
+ long currentTimeMillis = statuses.getStartMillis();
+ do {
+ if (null == hist) {
+
+ /* There is no B entry left, which means that the relay/bridge was
+ * offline from "current" time to the end of the A entry. */
+ ghc.addHistoryEntry(currentTimeMillis, statuses.getEndMillis(), 0.0);
+ currentTimeMillis = statuses.getEndMillis();
+ } else if (statuses.getEndMillis() <= hist.getStartMillis()) {
+
+ /* This A entry ends before the B entry starts. If there was an
+ * earlier B entry, count this time as offline time. */
+ if (history.first().getStartMillis() <= currentTimeMillis) {
+ ghc.addHistoryEntry(currentTimeMillis, statuses.getEndMillis(),
+ 0.0);
+ }
+ currentTimeMillis = statuses.getEndMillis();
+ } else {
+
+ /* A and B entries overlap. First, if there's time between "current"
+ * time and the time when B starts, possibly count that as offline
+ * time, but only if the relay was around earlier. */
+ if (currentTimeMillis < hist.getStartMillis()) {
+ if (history.first().getStartMillis() <= currentTimeMillis) {
+ ghc.addHistoryEntry(currentTimeMillis, hist.getStartMillis(),
+ 0.0);
+ }
+ currentTimeMillis = hist.getStartMillis();
+ }
+
+ /* Now handle the actually overlapping part. First determine when the
+ * overlap ends, then add a history entry with the number of uptime
+ * milliseconds as value. */
+ long overlapEndMillis = Math.min(statuses.getEndMillis(),
+ hist.getEndMillis());
+ ghc.addHistoryEntry(currentTimeMillis, overlapEndMillis,
+ overlapEndMillis - currentTimeMillis);
+ currentTimeMillis = overlapEndMillis;
+
+ /* If A ends at or after B ends, move on to the next B entry. */
+ if (statuses.getEndMillis() >= hist.getEndMillis()) {
+ do {
+ hist = historyIterator.hasNext() ? historyIterator.next() : null;
+ } while (null != hist && (hist.isRelay() != relay
+ || (null != flag && (null == hist.getFlags()
+ || !hist.getFlags().contains(flag)))));
+ }
+ }
+ } while (currentTimeMillis < statuses.getEndMillis());
}
+
+ /* Now that the graph history compiler knows all relevant history, ask it to
+ * compile graphs for us, and return them. */
+ return ghc.compileGraphHistories();
}
@Override
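
Because history entries are marked divisible here and each overlap entry's value equals its own length in milliseconds (with 0.0 for offline stretches), the compiler's value-over-milliseconds division turns every data point into an uptime fraction between 0.0 and 1.0, which is then scaled to the 0-999 graph range. A small worked example with invented numbers:

/* Hypothetical 4-hour data point in which the relay was up for 3 hours. */
double upMillis = 3.0 * 60.0 * 60.0 * 1000.0;       // overlap entry: value == its length
double offlineMillis = 1.0 * 60.0 * 60.0 * 1000.0;  // offline entry: value == 0.0
double fraction = upMillis / (upMillis + offlineMillis);  // 0.75
/* If the graph's maximum data point is fully up (maxValue == 1.0), this data
 * point is written as (int) (fraction * 999.0) == 749. */
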
diff --git a/src/main/java/org/torproject/onionoo/writer/WeightsDocumentWriter.java b/src/main/java/org/torproject/onionoo/writer/WeightsDocumentWriter.java
index f4e2c3a..b34a9e6 100644
--- a/src/main/java/org/torproject/onionoo/writer/WeightsDocumentWriter.java
+++ b/src/main/java/org/torproject/onionoo/writer/WeightsDocumentWriter.java
@@ -15,12 +15,7 @@ import org.torproject.onionoo.docs.WeightsStatus;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
-import java.time.LocalDateTime;
import java.time.Period;
-import java.time.ZoneOffset;
-import java.util.ArrayList;
-import java.util.LinkedHashMap;
-import java.util.List;
import java.util.Map;
import java.util.SortedMap;
import java.util.SortedSet;
@@ -105,125 +100,22 @@ public class WeightsDocumentWriter implements DocumentWriter {
private Map<String, GraphHistory> compileGraphType(
SortedMap<long[], double[]> history, long lastSeenMillis,
int graphTypeIndex) {
- Map<String, GraphHistory> graphs = new LinkedHashMap<>();
- for (int graphIntervalIndex = 0; graphIntervalIndex
- < this.graphIntervals.length; graphIntervalIndex++) {
- String graphName = this.graphNames[graphIntervalIndex];
- GraphHistory graphHistory = this.compileWeightsHistory(
- graphTypeIndex, graphIntervalIndex, history, lastSeenMillis);
- if (graphHistory != null) {
- graphs.put(graphName, graphHistory);
- }
+ GraphHistoryCompiler ghc = new GraphHistoryCompiler(
+ lastSeenMillis + DateTimeHelper.ONE_HOUR);
+ for (int i = 0; i < this.graphIntervals.length; i++) {
+ ghc.addGraphType(this.graphNames[i], this.graphIntervals[i],
+ this.dataPointIntervals[i]);
}
- return graphs;
- }
-
- private GraphHistory compileWeightsHistory(int graphTypeIndex,
- int graphIntervalIndex, SortedMap<long[], double[]> history,
- long lastSeenMillis) {
- Period graphInterval = this.graphIntervals[graphIntervalIndex];
- long dataPointInterval =
- this.dataPointIntervals[graphIntervalIndex];
- List<Double> dataPoints = new ArrayList<>();
- long graphEndMillis = ((lastSeenMillis + DateTimeHelper.ONE_HOUR)
- / dataPointInterval) * dataPointInterval;
- long graphStartMillis = LocalDateTime
- .ofEpochSecond(graphEndMillis / 1000L, 0, ZoneOffset.UTC)
- .minus(graphInterval)
- .toEpochSecond(ZoneOffset.UTC) * 1000L;
- long intervalStartMillis = graphStartMillis;
- long totalMillis = 0L;
- double totalWeightTimesMillis = 0.0;
for (Map.Entry<long[], double[]> e : history.entrySet()) {
long startMillis = e.getKey()[0];
long endMillis = e.getKey()[1];
double weight = e.getValue()[graphTypeIndex];
- if (endMillis <= intervalStartMillis) {
- continue;
- } else if (endMillis > graphEndMillis) {
- break;
- }
- while ((intervalStartMillis / dataPointInterval)
- != ((endMillis - 1L) / dataPointInterval)) {
- dataPoints.add(totalMillis * 5L < dataPointInterval
- ? -1.0 : totalWeightTimesMillis / (double) totalMillis);
- totalWeightTimesMillis = 0.0;
- totalMillis = 0L;
- intervalStartMillis += dataPointInterval;
- }
if (weight >= 0.0) {
- totalWeightTimesMillis += weight
- * ((double) (endMillis - startMillis));
- totalMillis += (endMillis - startMillis);
+ ghc.addHistoryEntry(startMillis, endMillis,
+ weight * ((double) (endMillis - startMillis)));
}
}
- dataPoints.add(totalMillis * 5L < dataPointInterval
- ? -1.0 : totalWeightTimesMillis / (double) totalMillis);
- double maxValue = 0.0;
- int firstNonNullIndex = -1;
- int lastNonNullIndex = -1;
- for (int dataPointIndex = 0; dataPointIndex < dataPoints.size();
- dataPointIndex++) {
- double dataPoint = dataPoints.get(dataPointIndex);
- if (dataPoint >= 0.0) {
- if (firstNonNullIndex < 0) {
- firstNonNullIndex = dataPointIndex;
- }
- lastNonNullIndex = dataPointIndex;
- if (dataPoint > maxValue) {
- maxValue = dataPoint;
- }
- }
- }
- if (firstNonNullIndex < 0) {
- /* Not a single non-negative value in the data points. */
- return null;
- }
- long firstDataPointMillis = graphStartMillis + firstNonNullIndex
- * dataPointInterval + dataPointInterval / 2L;
- if (graphIntervalIndex > 0 && firstDataPointMillis >= LocalDateTime
- .ofEpochSecond(graphEndMillis / 1000L, 0, ZoneOffset.UTC)
- .minus(graphIntervals[graphIntervalIndex - 1])
- .toEpochSecond(ZoneOffset.UTC) * 1000L) {
- /* Skip weights history object, because it doesn't contain
- * anything new that wasn't already contained in the last
- * weights history object(s). */
- return null;
- }
- long lastDataPointMillis = firstDataPointMillis
- + (lastNonNullIndex - firstNonNullIndex) * dataPointInterval;
- double factor = ((double) maxValue) / 999.0;
- int count = lastNonNullIndex - firstNonNullIndex + 1;
- GraphHistory graphHistory = new GraphHistory();
- graphHistory.setFirst(firstDataPointMillis);
- graphHistory.setLast(lastDataPointMillis);
- graphHistory.setInterval((int) (dataPointInterval
- / DateTimeHelper.ONE_SECOND));
- graphHistory.setFactor(factor);
- graphHistory.setCount(count);
- int previousNonNullIndex = -2;
- boolean foundTwoAdjacentDataPoints = false;
- List<Integer> values = new ArrayList<>();
- for (int dataPointIndex = firstNonNullIndex; dataPointIndex
- <= lastNonNullIndex; dataPointIndex++) {
- double dataPoint = dataPoints.get(dataPointIndex);
- if (dataPoint >= 0.0) {
- if (dataPointIndex - previousNonNullIndex == 1) {
- foundTwoAdjacentDataPoints = true;
- }
- previousNonNullIndex = dataPointIndex;
- }
- values.add(dataPoint < 0.0 ? null :
- (int) ((dataPoint * 999.0) / maxValue));
- }
- graphHistory.setValues(values);
- if (foundTwoAdjacentDataPoints) {
- return graphHistory;
- } else {
- /* There are no two adjacent values in the data points that are
- * required to draw a line graph. */
- return null;
- }
+ return ghc.compileGraphHistories();
}
@Override
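
For weights, the value passed above is weight * (endMillis - startMillis), so the compiler's division by accumulated milliseconds yields a time-weighted average of the weight fraction per data point, matching what compileWeightsHistory() computed before. A quick sketch with invented numbers:

/* Two hypothetical 1-hour weight entries falling into the same data point. */
long oneHourMillis = 60L * 60L * 1000L;
double value1 = 0.002 * oneHourMillis;   // weight 0.002 over one hour
double value2 = 0.004 * oneHourMillis;   // weight 0.004 over one hour
long totalMillis = 2L * oneHourMillis;
double averageWeight = (value1 + value2) / totalMillis;  // 0.003
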
diff --git a/src/test/java/org/torproject/onionoo/writer/BandwidthDocumentWriterTest.java b/src/test/java/org/torproject/onionoo/writer/BandwidthDocumentWriterTest.java
index 324122c..fa76699 100644
--- a/src/test/java/org/torproject/onionoo/writer/BandwidthDocumentWriterTest.java
+++ b/src/test/java/org/torproject/onionoo/writer/BandwidthDocumentWriterTest.java
@@ -74,9 +74,9 @@ public class BandwidthDocumentWriterTest {
assertEquals(1, document.getReadHistory().size());
assertTrue(document.getReadHistory().containsKey("1_month"));
GraphHistory history = document.getReadHistory().get("1_month");
- assertEquals(DateTimeHelper.parse(dayBeforeYesterday + " 14:00:00"),
+ assertEquals(DateTimeHelper.parse(dayBeforeYesterday + " 10:00:00"),
history.getFirst());
- assertEquals(DateTimeHelper.parse(yesterday + " 02:00:00"),
+ assertEquals(DateTimeHelper.parse(dayBeforeYesterday + " 22:00:00"),
history.getLast());
assertEquals(DateTimeHelper.FOUR_HOURS / DateTimeHelper.ONE_SECOND,
(int) history.getInterval());
diff --git a/src/test/java/org/torproject/onionoo/writer/GraphHistoryCompilerTest.java b/src/test/java/org/torproject/onionoo/writer/GraphHistoryCompilerTest.java
new file mode 100644
index 0000000..6d9c461
--- /dev/null
+++ b/src/test/java/org/torproject/onionoo/writer/GraphHistoryCompilerTest.java
@@ -0,0 +1,203 @@
+/* Copyright 2018 The Tor Project
+ * See LICENSE for licensing information */
+
+package org.torproject.onionoo.writer;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotNull;
+
+import org.torproject.onionoo.docs.DateTimeHelper;
+import org.torproject.onionoo.docs.GraphHistory;
+
+import com.google.gson.Gson;
+
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.junit.runners.Parameterized;
+import org.junit.runners.Parameterized.Parameter;
+import org.junit.runners.Parameterized.Parameters;
+
+import java.time.Period;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.Map;
+
+@RunWith(Parameterized.class)
+public class GraphHistoryCompilerTest {
+
+ /** Provide test data. */
+ @Parameters
+ public static Collection<Object[]> data() {
+ return Arrays.asList(new Object[][] {
+ { "Empty history",
+ false, new String[0][], 0, null, null, null, null, null, null,
+ null },
+ { "Single entry right before graphs end",
+ false, new String[][] {
+ new String[] { "2017-12-31 23:00", "2018-01-01 00:00", "1" }},
+ 0, null, null, null, null, null, null, null },
+ { "Two consecutive entries right before graphs end",
+ false, new String[][] {
+ new String[] { "2017-12-31 22:00", "2017-12-31 23:00", "1" },
+ new String[] { "2017-12-31 23:00", "2018-01-01 00:00", "1" }},
+ 1, "1_week", "2017-12-31 22:30", "2017-12-31 23:30", 3600, 0.001, 2,
+ new Integer[] { 999, 999 } },
+ { "Two non-consecutive entries towards graphs end",
+ false, new String[][] {
+ new String[] { "2017-12-31 21:00", "2017-12-31 22:00", "1" },
+ new String[] { "2017-12-31 23:00", "2018-01-01 00:00", "1" }},
+ 0, null, null, null, null, null, null, null },
+ { "Two consecutive entries passing 1 week threshold",
+ false, new String[][] {
+ new String[] { "2017-12-24 23:00", "2017-12-25 00:00", "1" },
+ new String[] { "2017-12-25 00:00", "2017-12-25 01:00", "1" }},
+ 1, "1_month", "2017-12-24 22:00", "2017-12-25 02:00", 14400,
+ 2.7805583361138913E-10, 2, new Integer[] { 999, 999 } },
+ { "Two consecutive 1-hour entries over 1 week from graphs end",
+ false, new String[][] {
+ new String[] { "2017-12-21 22:00", "2017-12-21 23:00", "1" },
+ new String[] { "2017-12-21 23:00", "2017-12-22 00:00", "1" }},
+ 0, null, null, null, null, null, null, null },
+ { "Two consecutive 4-hour entries over 1 week from graphs end",
+ false, new String[][] {
+ new String[] { "2017-12-21 16:00", "2017-12-21 20:00", "1" },
+ new String[] { "2017-12-21 20:00", "2017-12-22 00:00", "1" }},
+ 1, "1_month", "2017-12-21 18:00", "2017-12-21 22:00", 14400, 0.001,
+ 2, new Integer[] { 999, 999 } },
+ { "Two consecutive 4-hour entries right before graphs end",
+ false, new String[][] {
+ new String[] { "2017-12-31 16:00", "2017-12-31 20:00", "1" },
+ new String[] { "2017-12-31 20:00", "2018-01-01 00:00", "1" }},
+ 1, "1_month", "2017-12-31 18:00", "2017-12-31 22:00", 14400, 0.001,
+ 2, new Integer[] { 999, 999 } },
+ { "Single 1-week divisible entry right before graphs end",
+ true, new String[][] {
+ new String[] { "2017-12-25 00:00", "2018-01-01 00:00", "1" }},
+ 1, "1_week", "2017-12-25 00:30", "2017-12-31 23:30", 3600, 0.001,
+ 168, null },
+ { "Single 1-week-and-1-hour divisible entry right before graphs end",
+ true, new String[][] {
+ new String[] { "2017-12-24 23:00", "2018-01-01 00:00", "1" }},
+ 2, "1_month", "2017-12-24 22:00", "2017-12-31 22:00", 14400, 0.001,
+ 43, null },
+ { "Single 66-minute divisible entry right before graphs end",
+ true, new String[][] {
+ new String[] { "2017-12-31 22:54", "2018-01-01 00:00", "1" }},
+ 0, null, null, null, null, null, null, null },
+ { "Single 72-minute divisible entry right before graphs end",
+ true, new String[][] {
+ new String[] { "2017-12-31 22:48", "2018-01-01 00:00", "1" }},
+ 1, "1_week", "2017-12-31 22:30", "2017-12-31 23:30", 3600, 0.001,
+ 2, null },
+ { "Single 6-month divisible entry 6 years before graphs end",
+ true, new String[][] {
+ new String[] { "2012-01-01 00:00", "2012-07-01 00:00", "1" }},
+ 0, null, null, null, null, null, null, null },
+ { "Two consecutive 1-hour entries right after graphs end",
+ false, new String[][] {
+ new String[] { "2018-01-01 00:00", "2018-01-01 01:00", "1" },
+ new String[] { "2018-01-01 01:00", "2018-01-01 02:00", "1" }},
+ 0, null, null, null, null, null, null, null }
+ });
+ }
+
+ @Parameter
+ public String testDescription;
+
+ @Parameter(1)
+ public boolean divisible;
+
+ @Parameter(2)
+ public String[][] historyEntries;
+
+ @Parameter(3)
+ public int expectedGraphs;
+
+ @Parameter(4)
+ public String expectedGraphName;
+
+ @Parameter(5)
+ public String expectedFirst;
+
+ @Parameter(6)
+ public String expectedLast;
+
+ @Parameter(7)
+ public Integer expectedInterval;
+
+ @Parameter(8)
+ public Double expectedFactor;
+
+ @Parameter(9)
+ public Integer expectedCount;
+
+ @Parameter(10)
+ public Integer[] expectedValues;
+
+ private final String[] graphNames = new String[] {
+ "1_week",
+ "1_month",
+ "3_months",
+ "1_year",
+ "5_years" };
+
+ private final Period[] graphIntervals = new Period[] {
+ Period.ofWeeks(1),
+ Period.ofMonths(1),
+ Period.ofMonths(3),
+ Period.ofYears(1),
+ Period.ofYears(5) };
+
+ private final long[] dataPointIntervals = new long[] {
+ DateTimeHelper.ONE_HOUR,
+ DateTimeHelper.FOUR_HOURS,
+ DateTimeHelper.TWELVE_HOURS,
+ DateTimeHelper.TWO_DAYS,
+ DateTimeHelper.TEN_DAYS };
+
+ @Test
+ public void test() {
+ GraphHistoryCompiler ghc = new GraphHistoryCompiler(DateTimeHelper.parse(
+ "2018-01-01 00:00:00"));
+ ghc.setDivisible(this.divisible);
+ for (int i = 0; i < this.graphIntervals.length; i++) {
+ ghc.addGraphType(this.graphNames[i], this.graphIntervals[i],
+ this.dataPointIntervals[i]);
+ }
+ for (String[] historyEntry : this.historyEntries) {
+ ghc.addHistoryEntry(DateTimeHelper.parse(historyEntry[0] + ":00"),
+ DateTimeHelper.parse(historyEntry[1] + ":00"),
+ Double.parseDouble(historyEntry[2]));
+ }
+ Map<String, GraphHistory> compiledGraphHistories =
+ ghc.compileGraphHistories();
+ String message = this.testDescription + "; "
+ + new Gson().toJson(compiledGraphHistories);
+ assertEquals(message, this.expectedGraphs, compiledGraphHistories.size());
+ if (null != this.expectedGraphName) {
+ GraphHistory gh = compiledGraphHistories.get(this.expectedGraphName);
+ assertNotNull(message, gh);
+ if (null != this.expectedFirst) {
+ assertEquals(message, DateTimeHelper.parse(this.expectedFirst + ":00"),
+ gh.getFirst());
+ }
+ if (null != this.expectedLast) {
+ assertEquals(message, DateTimeHelper.parse(this.expectedLast + ":00"),
+ gh.getLast());
+ }
+ if (null != this.expectedInterval) {
+ assertEquals(message, this.expectedInterval, gh.getInterval());
+ }
+ if (null != this.expectedFactor) {
+ assertEquals(message, this.expectedFactor, gh.getFactor(), 0.01);
+ }
+ if (null != this.expectedCount) {
+ assertEquals(message, this.expectedCount, gh.getCount());
+ }
+ if (null != this.expectedValues) {
+ assertEquals(message, Arrays.asList(this.expectedValues),
+ gh.getValues());
+ }
+ }
+ }
+}
diff --git a/src/test/java/org/torproject/onionoo/writer/UptimeDocumentWriterTest.java b/src/test/java/org/torproject/onionoo/writer/UptimeDocumentWriterTest.java
index b1ba2ed..030d100 100644
--- a/src/test/java/org/torproject/onionoo/writer/UptimeDocumentWriterTest.java
+++ b/src/test/java/org/torproject/onionoo/writer/UptimeDocumentWriterTest.java
@@ -248,7 +248,7 @@ public class UptimeDocumentWriterTest {
UptimeDocument.class, GABELMOO_FINGERPRINT);
this.assertOneMonthGraph(document, 2, "2014-03-16 10:00:00",
"2014-03-16 14:00:00", 2,
- Arrays.asList(new Integer[] { 499, 249 }));
+ Arrays.asList(new Integer[] { 999, 499 }));
}
@Test
@@ -256,8 +256,8 @@ public class UptimeDocumentWriterTest {
/* This relay was running for exactly 11 days and 23 hours over 2 years ago.
* This time period exactly matches 100% of a data point interval of 10 days
* plus a tiny bit less than 20% of the next data point interval. */
- this.addStatusOneWeekSample("r 2012-03-05-00 287\n",
- "r 2012-03-05-00 287\n");
+ this.addStatusOneWeekSample("r 2012-03-01-00 287\n",
+ "r 2012-03-01-00 287\n");
UptimeDocumentWriter writer = new UptimeDocumentWriter();
DescriptorSourceFactory.getDescriptorSource().readDescriptors();
writer.writeDocuments();
@@ -274,8 +274,8 @@ public class UptimeDocumentWriterTest {
/* This relay was running for exactly 12 days over 2 years ago. This time
* period exactly matches 100% of a data point interval of 10 days plus 20%
* of the next data point interval. */
- this.addStatusOneWeekSample("r 2012-03-05-00 288\n",
- "r 2012-03-05-00 288\n");
+ this.addStatusOneWeekSample("r 2012-03-01-00 288\n",
+ "r 2012-03-01-00 288\n");
UptimeDocumentWriter writer = new UptimeDocumentWriter();
DescriptorSourceFactory.getDescriptorSource().readDescriptors();
writer.writeDocuments();
@@ -283,8 +283,8 @@ public class UptimeDocumentWriterTest {
this.documentStore.getPerformedStoreOperations());
UptimeDocument document = this.documentStore.getDocument(
UptimeDocument.class, GABELMOO_FINGERPRINT);
- this.assertFiveYearGraph(document, 1, "2012-03-10 00:00:00",
- "2012-03-20 00:00:00", 2, Arrays.asList(new Integer[] { 999, 999 }));
+ this.assertFiveYearGraph(document, 1, "2012-03-06 00:00:00",
+ "2012-03-16 00:00:00", 2, Arrays.asList(new Integer[] { 999, 999 }));
}
}