[tor-commits] [metrics-web/master] Group classes in subpackages.
commit 56413740309b495d3a88f5535a190eccb83cc232
Author: Karsten Loesing <karsten.loesing@xxxxxxx>
Date: Fri Dec 14 12:59:10 2012 +0100
Group classes in subpackages.
---
build.xml | 4 +-
etc/web.xml | 28 +-
.../ernie/cron/BridgeStatsFileHandler.java | 718 ------------
.../ernie/cron/ConsensusStatsFileHandler.java | 380 -------
.../ernie/cron/ExoneraTorDatabaseImporter.java | 619 -----------
.../ernie/cron/GeoipDatabaseImporter.java | 103 --
src/org/torproject/ernie/cron/Main.java | 5 +
.../ernie/cron/PerformanceStatsImporter.java | 271 -----
.../cron/RelayDescriptorDatabaseImporter.java | 3 +
.../torproject/ernie/cron/TorperfProcessor.java | 374 -------
.../cron/network/ConsensusStatsFileHandler.java | 380 +++++++
.../ernie/cron/network/GeoipDatabaseImporter.java | 105 ++
.../cron/performance/PerformanceStatsImporter.java | 271 +++++
.../ernie/cron/performance/TorperfProcessor.java | 374 +++++++
.../ernie/cron/users/BridgeStatsFileHandler.java | 718 ++++++++++++
.../status/doctor/ConsensusHealthServlet.java | 57 +
.../ernie/status/exonerator/ConsensusServlet.java | 124 +++
.../exonerator/ExoneraTorDatabaseImporter.java | 619 +++++++++++
.../ernie/status/exonerator/ExoneraTorServlet.java | 1154 ++++++++++++++++++++
.../status/exonerator/ServerDescriptorServlet.java | 132 +++
.../status/relaysearch/RelaySearchServlet.java | 505 +++++++++
.../ernie/web/ConsensusHealthServlet.java | 57 -
src/org/torproject/ernie/web/ConsensusServlet.java | 124 ---
src/org/torproject/ernie/web/Countries.java | 284 -----
src/org/torproject/ernie/web/CsvServlet.java | 97 --
.../torproject/ernie/web/ExoneraTorServlet.java | 1154 --------------------
src/org/torproject/ernie/web/GraphDataServlet.java | 279 -----
.../torproject/ernie/web/GraphImageServlet.java | 76 --
.../ernie/web/GraphParameterChecker.java | 297 -----
.../ernie/web/GraphsSubpagesServlet.java | 163 ---
src/org/torproject/ernie/web/RObject.java | 23 -
src/org/torproject/ernie/web/RObjectGenerator.java | 394 -------
.../torproject/ernie/web/RelaySearchServlet.java | 505 ---------
.../torproject/ernie/web/ResearchDataServlet.java | 260 -----
.../ernie/web/ResearchFormatsServlet.java | 24 -
.../ernie/web/ResearchPapersServlet.java | 24 -
.../torproject/ernie/web/ResearchToolsServlet.java | 24 -
.../ernie/web/ServerDescriptorServlet.java | 132 ---
.../ernie/web/TableParameterChecker.java | 120 --
src/org/torproject/ernie/web/graphs/Countries.java | 284 +++++
.../torproject/ernie/web/graphs/CsvServlet.java | 97 ++
.../ernie/web/graphs/GraphDataServlet.java | 279 +++++
.../ernie/web/graphs/GraphImageServlet.java | 76 ++
.../ernie/web/graphs/GraphParameterChecker.java | 297 +++++
.../ernie/web/graphs/GraphsSubpagesServlet.java | 163 +++
src/org/torproject/ernie/web/graphs/RObject.java | 23 +
.../ernie/web/graphs/RObjectGenerator.java | 394 +++++++
.../ernie/web/graphs/TableParameterChecker.java | 120 ++
.../ernie/web/research/ResearchDataServlet.java | 260 +++++
.../ernie/web/research/ResearchFormatsServlet.java | 24 +
.../ernie/web/research/ResearchPapersServlet.java | 24 +
.../ernie/web/research/ResearchToolsServlet.java | 24 +
52 files changed, 6528 insertions(+), 6518 deletions(-)
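The class moves above change only package names; callers elsewhere would update their imports accordingly. A hypothetical sketch (these exact import lines are an assumption, not taken from the commit):

    import org.torproject.ernie.cron.network.ConsensusStatsFileHandler;
    import org.torproject.ernie.cron.performance.TorperfProcessor;
    import org.torproject.ernie.cron.users.BridgeStatsFileHandler;
    import org.torproject.ernie.status.exonerator.ExoneraTorServlet;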
diff --git a/build.xml b/build.xml
index a6180b0..292a262 100644
--- a/build.xml
+++ b/build.xml
@@ -47,7 +47,7 @@
<target name="geoipdb" depends="compile">
<java fork="true"
maxmemory="1024m"
- classname="org.torproject.ernie.cron.GeoipDatabaseImporter">
+ classname="org.torproject.ernie.cron.network.GeoipDatabaseImporter">
<classpath refid="classpath"/>
</java>
</target>
@@ -56,7 +56,7 @@
<target name="exonerator" depends="compile">
<java fork="true"
maxmemory="2048m"
- classname="org.torproject.ernie.cron.ExoneraTorDatabaseImporter">
+ classname="org.torproject.ernie.status.exonerator.ExoneraTorDatabaseImporter">
<classpath refid="classpath"/>
</java>
</target>
diff --git a/etc/web.xml b/etc/web.xml
index 176b84f..79ac4c1 100644
--- a/etc/web.xml
+++ b/etc/web.xml
@@ -31,7 +31,7 @@
<servlet>
<servlet-name>GraphsSubpages</servlet-name>
<servlet-class>
- org.torproject.ernie.web.GraphsSubpagesServlet
+ org.torproject.ernie.web.graphs.GraphsSubpagesServlet
</servlet-class>
</servlet>
<servlet-mapping>
@@ -69,7 +69,7 @@
<servlet>
<servlet-name>ResearchData</servlet-name>
<servlet-class>
- org.torproject.ernie.web.ResearchDataServlet
+ org.torproject.ernie.web.research.ResearchDataServlet
</servlet-class>
<init-param>
<param-name>localDataDir</param-name>
@@ -86,7 +86,7 @@
<servlet>
<servlet-name>ResearchPapers</servlet-name>
<servlet-class>
- org.torproject.ernie.web.ResearchPapersServlet
+ org.torproject.ernie.web.research.ResearchPapersServlet
</servlet-class>
</servlet>
<servlet-mapping>
@@ -97,7 +97,7 @@
<servlet>
<servlet-name>ResearchTools</servlet-name>
<servlet-class>
- org.torproject.ernie.web.ResearchToolsServlet
+ org.torproject.ernie.web.research.ResearchToolsServlet
</servlet-class>
</servlet>
<servlet-mapping>
@@ -108,7 +108,7 @@
<servlet>
<servlet-name>ResearchFormats</servlet-name>
<servlet-class>
- org.torproject.ernie.web.ResearchFormatsServlet
+ org.torproject.ernie.web.research.ResearchFormatsServlet
</servlet-class>
</servlet>
<servlet-mapping>
@@ -130,7 +130,7 @@
<servlet>
<servlet-name>RelaySearch</servlet-name>
<servlet-class>
- org.torproject.ernie.web.RelaySearchServlet
+ org.torproject.ernie.status.relaysearch.RelaySearchServlet
</servlet-class>
</servlet>
<servlet-mapping>
@@ -141,7 +141,7 @@
<servlet>
<servlet-name>GraphImage</servlet-name>
<servlet-class>
- org.torproject.ernie.web.GraphImageServlet
+ org.torproject.ernie.web.graphs.GraphImageServlet
</servlet-class>
</servlet>
<servlet-mapping>
@@ -202,7 +202,7 @@
<servlet>
<servlet-name>Csv</servlet-name>
<servlet-class>
- org.torproject.ernie.web.CsvServlet
+ org.torproject.ernie.web.graphs.CsvServlet
</servlet-class>
</servlet>
<servlet-mapping>
@@ -213,7 +213,7 @@
<servlet>
<servlet-name>ExoneraTor</servlet-name>
<servlet-class>
- org.torproject.ernie.web.ExoneraTorServlet
+ org.torproject.ernie.status.exonerator.ExoneraTorServlet
</servlet-class>
</servlet>
<servlet-mapping>
@@ -224,7 +224,7 @@
<servlet>
<servlet-name>ServerDescriptor</servlet-name>
<servlet-class>
- org.torproject.ernie.web.ServerDescriptorServlet
+ org.torproject.ernie.status.exonerator.ServerDescriptorServlet
</servlet-class>
</servlet>
<servlet-mapping>
@@ -235,7 +235,7 @@
<servlet>
<servlet-name>Consensus</servlet-name>
<servlet-class>
- org.torproject.ernie.web.ConsensusServlet
+ org.torproject.ernie.status.exonerator.ConsensusServlet
</servlet-class>
</servlet>
<servlet-mapping>
@@ -246,7 +246,7 @@
<servlet>
<servlet-name>ConsensusHealthServlet</servlet-name>
<servlet-class>
- org.torproject.ernie.web.ConsensusHealthServlet
+ org.torproject.ernie.status.doctor.ConsensusHealthServlet
</servlet-class>
</servlet>
<servlet-mapping>
@@ -257,7 +257,7 @@
<servlet>
<servlet-name>GraphData</servlet-name>
<servlet-class>
- org.torproject.ernie.web.GraphDataServlet
+ org.torproject.ernie.web.graphs.GraphDataServlet
</servlet-class>
</servlet>
<servlet-mapping>
@@ -314,7 +314,7 @@
<listener>
<listener-class>
- org.torproject.ernie.web.RObjectGenerator
+ org.torproject.ernie.web.graphs.RObjectGenerator
</listener-class>
</listener>
diff --git a/src/org/torproject/ernie/cron/BridgeStatsFileHandler.java b/src/org/torproject/ernie/cron/BridgeStatsFileHandler.java
deleted file mode 100644
index 89d65a3..0000000
--- a/src/org/torproject/ernie/cron/BridgeStatsFileHandler.java
+++ /dev/null
@@ -1,718 +0,0 @@
-/* Copyright 2011, 2012 The Tor Project
- * See LICENSE for licensing information */
-package org.torproject.ernie.cron;
-
-import java.io.BufferedReader;
-import java.io.BufferedWriter;
-import java.io.File;
-import java.io.FileReader;
-import java.io.FileWriter;
-import java.io.IOException;
-import java.sql.Connection;
-import java.sql.DriverManager;
-import java.sql.PreparedStatement;
-import java.sql.ResultSet;
-import java.sql.SQLException;
-import java.sql.Statement;
-import java.text.ParseException;
-import java.text.SimpleDateFormat;
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.Iterator;
-import java.util.List;
-import java.util.Map;
-import java.util.SortedMap;
-import java.util.SortedSet;
-import java.util.TimeZone;
-import java.util.TreeMap;
-import java.util.TreeSet;
-import java.util.logging.Level;
-import java.util.logging.Logger;
-
-import org.apache.commons.codec.DecoderException;
-import org.apache.commons.codec.binary.Hex;
-import org.apache.commons.codec.digest.DigestUtils;
-import org.torproject.descriptor.Descriptor;
-import org.torproject.descriptor.DescriptorFile;
-import org.torproject.descriptor.DescriptorReader;
-import org.torproject.descriptor.DescriptorSourceFactory;
-import org.torproject.descriptor.ExtraInfoDescriptor;
-import org.torproject.descriptor.NetworkStatusEntry;
-import org.torproject.descriptor.RelayNetworkStatusConsensus;
-import org.torproject.descriptor.ServerDescriptor;
-
-/**
- * Determines estimates of bridge users per country and day from the
- * extra-info descriptors that bridges publish. In a first step, the
- * number of unique IP addresses that bridges see is normalized to a
- * 24-hour period. In the next step, all bridges that have been running
- * as a relay are excluded. Finally, observations are summed up and
- * written to <code>stats/bridge-stats</code>.
- */
-public class BridgeStatsFileHandler {
-
- /**
- * Two-letter country codes of known countries.
- */
- private SortedSet<String> countries;
-
- /**
- * Intermediate results file containing bridge user numbers by country
- * as seen by single bridges, normalized to 24-hour periods.
- */
- private File bridgeStatsRawFile;
-
- /**
- * Temp file for writing intermediate results.
- */
- private File bridgeStatsRawTempFile;
-
- /**
- * Bridge user numbers by country as seen by single bridges on a given
- * day. Map keys are bridge and date written as "bridge,date", map
- * values are lines as read from <code>stats/bridge-stats-raw</code>.
- */
- private SortedMap<String, Map<String, String>> bridgeUsersRaw;
-
- /**
- * Helper file containing the hashed relay identities of all known
- * relays. These hashes are compared to the bridge identity hashes to
- * exclude bridges that have been known as relays from the statistics.
- */
- private File hashedRelayIdentitiesFile;
-
- /**
- * Known hashed relay identities used to exclude bridges that have been
- * running as relays.
- */
- private SortedSet<String> hashedRelays;
-
- /**
- * Helper file containing extra-info descriptors published by 0.2.2.x
- * bridges. If these descriptors contain geoip-stats, they are not
- * included in the results, because their stats are very likely broken.
- */
- private File zeroTwoTwoDescriptorsFile;
-
- /**
- * Extra-info descriptors published by 0.2.2.x bridges. If these
- * descriptors contain geoip-stats, they are not included in the
- * results, because their stats are very likely broken.
- */
- private SortedSet<String> zeroTwoTwoDescriptors;
-
- /**
- * Final results file containing the number of bridge users per country
- * and day. This file is not read in during initialization, but
- * overwritten at the end of the execution.
- */
- private File bridgeStatsFile;
-
- /**
- * Logger for this class.
- */
- private Logger logger;
-
- /* Database connection string. */
- private String connectionURL = null;
-
- private SimpleDateFormat dateTimeFormat;
-
- private File bridgesDir;
-
- private File statsDirectory;
-
- private boolean keepBridgeDescriptorImportHistory;
-
- private File archivesDirectory;
-
- private boolean keepRelayDescriptorImportHistory;
-
- /**
- * Initializes this class, including reading in intermediate results
- * files <code>stats/bridge-stats-raw</code> and
- * <code>stats/hashed-relay-identities</code>.
- */
- public BridgeStatsFileHandler(String connectionURL,
- File bridgesDir, File statsDirectory,
- boolean keepBridgeDescriptorImportHistory, File archivesDirectory,
- boolean keepRelayDescriptorImportHistory) {
-
- if (bridgesDir == null || statsDirectory == null ||
- archivesDirectory == null) {
- throw new IllegalArgumentException();
- }
- this.bridgesDir = bridgesDir;
- this.statsDirectory = statsDirectory;
- this.keepBridgeDescriptorImportHistory =
- keepBridgeDescriptorImportHistory;
- this.archivesDirectory = archivesDirectory;
- this.keepRelayDescriptorImportHistory =
- keepRelayDescriptorImportHistory;
-
- /* Initialize set of known countries. */
- this.countries = new TreeSet<String>();
- this.countries.add("zy");
-
- /* Initialize local data structures to hold results. */
- this.bridgeUsersRaw = new TreeMap<String, Map<String, String>>();
- this.hashedRelays = new TreeSet<String>();
- this.zeroTwoTwoDescriptors = new TreeSet<String>();
-
- /* Initialize file names for intermediate and final results. */
- this.bridgeStatsRawFile = new File("stats/bridge-stats-raw");
- this.bridgeStatsRawTempFile = new File("stats/bridge-stats-raw.tmp");
- this.bridgeStatsFile = new File("stats/bridge-stats");
- this.hashedRelayIdentitiesFile = new File(
- "stats/hashed-relay-identities");
- this.zeroTwoTwoDescriptorsFile = new File(
- "stats/v022-bridge-descriptors");
-
- /* Initialize database connection string. */
- this.connectionURL = connectionURL;
-
- this.dateTimeFormat = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
- this.dateTimeFormat.setTimeZone(TimeZone.getTimeZone("UTC"));
-
- /* Initialize logger. */
- this.logger = Logger.getLogger(
- BridgeStatsFileHandler.class.getName());
-
- /* Read in bridge user numbers by country as seen by single bridges,
- * normalized to 24-hour periods. */
- if (this.bridgeStatsRawFile.exists()) {
- try {
- this.logger.fine("Reading file "
- + this.bridgeStatsRawFile.getAbsolutePath() + "...");
- BufferedReader br = new BufferedReader(new FileReader(
- this.bridgeStatsRawFile));
- String line = br.readLine();
- if (line != null) {
- /* The first line should contain headers that we need to parse
- * in order to learn what countries we were interested in when
- * writing this file. */
- if (!line.startsWith("bridge,date,time,")) {
- this.logger.warning("Incorrect first line '" + line + "' in "
- + this.bridgeStatsRawFile.getAbsolutePath() + "! This line "
- + "should contain headers! Aborting to read in this "
- + "file!");
- } else {
- String[] headers = line.split(",");
- for (int i = 3; i < headers.length; i++) {
- if (!headers[i].equals("all")) {
- this.countries.add(headers[i]);
- }
- }
- /* Read in the rest of the file. */
- while ((line = br.readLine()) != null) {
- String[] parts = line.split(",");
- if (parts.length != headers.length) {
- this.logger.warning("Corrupt line '" + line + "' in file "
- + this.bridgeStatsRawFile.getAbsolutePath()
- + "! Aborting to read this file!");
- break;
- }
- String hashedBridgeIdentity = parts[0];
- String date = parts[1];
- String time = parts[2];
- SortedMap<String, String> obs =
- new TreeMap<String, String>();
- for (int i = 3; i < parts.length; i++) {
- if (parts[i].equals("NA")) {
- continue;
- }
- if (headers[i].equals("all")) {
- obs.put("zy", parts[i]);
- } else {
- obs.put(headers[i], parts[i]);
- }
- }
- long dateTimeMillis = dateTimeFormat.parse(date + " "
- + time).getTime();
- this.addObs(hashedBridgeIdentity, dateTimeMillis, obs);
- }
- }
- }
- br.close();
- this.logger.fine("Finished reading file "
- + this.bridgeStatsRawFile.getAbsolutePath() + ".");
- } catch (IOException e) {
- this.logger.log(Level.WARNING, "Failed to read file "
- + this.bridgeStatsRawFile.getAbsolutePath() + "!", e);
- } catch (ParseException e) {
- this.logger.log(Level.WARNING, "Failed to read file "
- + this.bridgeStatsRawFile.getAbsolutePath() + "!", e);
- }
- }
-
- /* Read in known hashed relay identities used to exclude bridges that
- * have been running as relays. */
- if (this.hashedRelayIdentitiesFile.exists()) {
- try {
- this.logger.fine("Reading file "
- + this.hashedRelayIdentitiesFile.getAbsolutePath() + "...");
- BufferedReader br = new BufferedReader(new FileReader(
- this.hashedRelayIdentitiesFile));
- String line = null;
- /* Read in all lines from the file and memorize them. */
- while ((line = br.readLine()) != null) {
- this.hashedRelays.add(line);
- }
- br.close();
- this.logger.fine("Finished reading file "
- + this.hashedRelayIdentitiesFile.getAbsolutePath() + ".");
- } catch (IOException e) {
- this.logger.log(Level.WARNING, "Failed to read file "
- + this.hashedRelayIdentitiesFile.getAbsolutePath() + "!", e);
- }
- }
-
- /* Read in known extra-info descriptors published by 0.2.2.x
- * bridges. */
- if (this.zeroTwoTwoDescriptorsFile.exists()) {
- try {
- this.logger.fine("Reading file "
- + this.zeroTwoTwoDescriptorsFile.getAbsolutePath() + "...");
- BufferedReader br = new BufferedReader(new FileReader(
- this.zeroTwoTwoDescriptorsFile));
- String line = null;
- /* Read in all lines from the file and memorize them. */
- while ((line = br.readLine()) != null) {
- this.zeroTwoTwoDescriptors.add(line);
- }
- br.close();
- this.logger.fine("Finished reading file "
- + this.zeroTwoTwoDescriptorsFile.getAbsolutePath() + ".");
- } catch (IOException e) {
- this.logger.log(Level.WARNING, "Failed to read file "
- + this.zeroTwoTwoDescriptorsFile.getAbsolutePath() + "!", e);
- }
- }
- }
-
- /**
- * Adds a hashed relay identity string to the list of bridges that we
- * are going to ignore in the future. If we counted user numbers from
- * bridges that have been running as relays, our numbers would be far
- * higher than what we think is correct.
- */
- public void addHashedRelay(String hashedRelayIdentity) {
- if (!this.hashedRelays.contains(hashedRelayIdentity)) {
- this.logger.finer("Adding new hashed relay identity: "
- + hashedRelayIdentity);
- this.hashedRelays.add(hashedRelayIdentity);
- }
- }
-
- /**
- * Adds an extra-info descriptor identifier published by a 0.2.2.x
- * bridge. If this extra-info descriptor contains geoip-stats, it is
- * not included in the results, because its stats are very likely broken.
- */
- public void addZeroTwoTwoDescriptor(String hashedBridgeIdentity,
- long publishedMillis) {
- String value = hashedBridgeIdentity.toUpperCase() + ","
- + this.dateTimeFormat.format(publishedMillis).
- replaceAll(" ", ",");
- if (!this.zeroTwoTwoDescriptors.contains(value)) {
- this.logger.finer("Adding new bridge 0.2.2.x extra-info "
- + "descriptor: " + value);
- this.zeroTwoTwoDescriptors.add(value);
- }
- }
-
- /**
- * Returns whether the given fingerprint is a known hashed relay
- * identity. <code>BridgeDescriptorParser</code> uses this information
- * to decide whether to continue parsing a bridge extra-info
- * descriptor or not.
- */
- public boolean isKnownRelay(String hashedBridgeIdentity) {
- return this.hashedRelays.contains(hashedBridgeIdentity);
- }
-
- /**
- * Adds bridge user numbers by country as seen by a single bridge on a
- * given date and time. Bridges can publish statistics on unique IP
- * addresses multiple times a day, but we only want to include one
- * observation per day. If we already have an observation from the given
- * bridge and day, we keep the one with the later publication time and
- * discard the other one.
- */
- public void addObs(String hashedIdentity, long publishedMillis,
- Map<String, String> obs) {
- for (String country : obs.keySet()) {
- this.countries.add(country);
- }
- String dateTime = this.dateTimeFormat.format(publishedMillis);
- String date = dateTime.split(" ")[0];
- String time = dateTime.split(" ")[1];
- String shortKey = hashedIdentity + "," + date;
- String longKey = shortKey + "," + time;
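- /* Look up the first existing key at or after "identity,date"; if
- * it shares that prefix, we already have an observation for this
- * bridge and day. */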
- SortedMap<String, Map<String, String>> tailMap =
- this.bridgeUsersRaw.tailMap(shortKey);
- String nextKey = tailMap.isEmpty() ? null : tailMap.firstKey();
- if (nextKey == null || !nextKey.startsWith(shortKey)) {
- this.logger.finer("Adding new bridge user numbers for key "
- + longKey);
- this.bridgeUsersRaw.put(longKey, obs);
- } else if (longKey.compareTo(nextKey) > 0) {
- this.logger.finer("Replacing existing bridge user numbers (" +
- nextKey + " with new numbers: " + longKey);
- this.bridgeUsersRaw.put(longKey, obs);
- } else {
- this.logger.finer("Not replacing existing bridge user numbers (" +
- nextKey + " with new numbers (" + longKey + ").");
- }
- }
-
- public void importSanitizedBridges() {
- if (bridgesDir.exists()) {
- logger.fine("Importing files in directory " + bridgesDir + "/...");
- DescriptorReader reader =
- DescriptorSourceFactory.createDescriptorReader();
- reader.addDirectory(bridgesDir);
- if (keepBridgeDescriptorImportHistory) {
- reader.setExcludeFiles(new File(statsDirectory,
- "bridge-stats-bridge-descriptor-history"));
- }
- Iterator<DescriptorFile> descriptorFiles = reader.readDescriptors();
- while (descriptorFiles.hasNext()) {
- DescriptorFile descriptorFile = descriptorFiles.next();
- if (descriptorFile.getDescriptors() != null) {
- for (Descriptor descriptor : descriptorFile.getDescriptors()) {
- if (descriptor instanceof ServerDescriptor) {
- this.addServerDescriptor((ServerDescriptor) descriptor);
- } else if (descriptor instanceof ExtraInfoDescriptor) {
- this.addExtraInfoDescriptor(
- (ExtraInfoDescriptor) descriptor);
- }
- }
- }
- }
- logger.info("Finished importing bridge descriptors.");
- }
- }
-
- private void addServerDescriptor(ServerDescriptor descriptor) {
- if (descriptor.getPlatform() != null &&
- descriptor.getPlatform().startsWith("Tor 0.2.2")) {
- this.addZeroTwoTwoDescriptor(descriptor.getFingerprint(),
- descriptor.getPublishedMillis());
- }
- }
-
- private void addExtraInfoDescriptor(ExtraInfoDescriptor descriptor) {
- if (!this.isKnownRelay(descriptor.getFingerprint())) {
- if (descriptor.getGeoipStartTimeMillis() >= 0 &&
- descriptor.getGeoipClientOrigins() != null) {
- long seconds = (descriptor.getPublishedMillis()
- - descriptor.getGeoipStartTimeMillis()) / 1000L;
- double allUsers = 0.0D;
- Map<String, String> obs = new HashMap<String, String>();
- for (Map.Entry<String, Integer> e :
- descriptor.getGeoipClientOrigins().entrySet()) {
- String country = e.getKey();
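- /* Reported counts are rounded up (to multiples of 8), so
- * subtracting 4 corrects the expected bias before scaling to a
- * 24-hour period. */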
- double users = ((double) e.getValue() - 4) * 86400.0D
- / ((double) seconds);
- allUsers += users;
- obs.put(country, String.format("%.2f", users));
- }
- obs.put("zy", String.format("%.2f", allUsers));
- this.addObs(descriptor.getFingerprint(),
- descriptor.getPublishedMillis(), obs);
- }
- if (descriptor.getBridgeStatsEndMillis() >= 0 &&
- descriptor.getBridgeIps() != null) {
- double allUsers = 0.0D;
- Map<String, String> obs = new HashMap<String, String>();
- for (Map.Entry<String, Integer> e :
- descriptor.getBridgeIps().entrySet()) {
- String country = e.getKey();
- double users = (double) e.getValue() - 4;
- allUsers += users;
- obs.put(country, String.format("%.2f", users));
- }
- obs.put("zy", String.format("%.2f", allUsers));
- this.addObs(descriptor.getFingerprint(),
- descriptor.getBridgeStatsEndMillis(), obs);
- }
- }
- }
-
- public void importRelayDescriptors() {
- if (archivesDirectory.exists()) {
- logger.fine("Importing files in directory " + archivesDirectory
- + "/...");
- DescriptorReader reader =
- DescriptorSourceFactory.createDescriptorReader();
- reader.addDirectory(archivesDirectory);
- if (keepRelayDescriptorImportHistory) {
- reader.setExcludeFiles(new File(statsDirectory,
- "bridge-stats-relay-descriptor-history"));
- }
- Iterator<DescriptorFile> descriptorFiles = reader.readDescriptors();
- while (descriptorFiles.hasNext()) {
- DescriptorFile descriptorFile = descriptorFiles.next();
- if (descriptorFile.getDescriptors() != null) {
- for (Descriptor descriptor : descriptorFile.getDescriptors()) {
- if (descriptor instanceof RelayNetworkStatusConsensus) {
- this.addRelayNetworkStatusConsensus(
- (RelayNetworkStatusConsensus) descriptor);
- }
- }
- }
- }
- }
-
- logger.info("Finished importing relay descriptors.");
- }
-
- private void addRelayNetworkStatusConsensus(
- RelayNetworkStatusConsensus consensus) {
- for (NetworkStatusEntry statusEntry :
- consensus.getStatusEntries().values()) {
- try {
- this.addHashedRelay(DigestUtils.shaHex(Hex.decodeHex(
- statusEntry.getFingerprint().toCharArray())).toUpperCase());
- } catch (DecoderException e) {
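- /* Skip status entries with unparseable fingerprints. */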
- }
- }
- }
-
- /**
- * Writes the list of hashed relay identities and bridge user numbers as
- * observed by single bridges to disk, aggregates per-day statistics for
- * all bridges, and writes those to disk, too.
- */
- public void writeFiles() {
-
- /* Write hashed relay identities to disk. */
- try {
- this.logger.fine("Writing file "
- + this.hashedRelayIdentitiesFile.getAbsolutePath() + "...");
- this.hashedRelayIdentitiesFile.getParentFile().mkdirs();
- BufferedWriter bw = new BufferedWriter(new FileWriter(
- this.hashedRelayIdentitiesFile));
- for (String hashedRelay : this.hashedRelays) {
- bw.append(hashedRelay + "\n");
- }
- bw.close();
- this.logger.fine("Finished writing file "
- + this.hashedRelayIdentitiesFile.getAbsolutePath() + ".");
- } catch (IOException e) {
- this.logger.log(Level.WARNING, "Failed to write "
- + this.hashedRelayIdentitiesFile.getAbsolutePath() + "!", e);
- }
-
- /* Write bridge extra-info descriptor identifiers to disk. */
- try {
- this.logger.fine("Writing file "
- + this.zeroTwoTwoDescriptorsFile.getAbsolutePath() + "...");
- this.zeroTwoTwoDescriptorsFile.getParentFile().mkdirs();
- BufferedWriter bw = new BufferedWriter(new FileWriter(
- this.zeroTwoTwoDescriptorsFile));
- for (String descriptorIdentifier : this.zeroTwoTwoDescriptors) {
- bw.append(descriptorIdentifier + "\n");
- }
- bw.close();
- this.logger.fine("Finished writing file "
- + this.zeroTwoTwoDescriptorsFile.getAbsolutePath() + ".");
- } catch (IOException e) {
- this.logger.log(Level.WARNING, "Failed to write "
- + this.zeroTwoTwoDescriptorsFile.getAbsolutePath() + "!", e);
- }
-
- /* Write observations made by single bridges to disk. */
- try {
- this.logger.fine("Writing file "
- + this.bridgeStatsRawFile.getAbsolutePath() + " (using "
- + this.bridgeStatsRawTempFile.getAbsolutePath() + " as temp "
- + "file)...");
- this.bridgeStatsRawTempFile.getParentFile().mkdirs();
- BufferedWriter bw = new BufferedWriter(new FileWriter(
- this.bridgeStatsRawTempFile));
- bw.append("bridge,date,time");
- for (String c : this.countries) {
- if (c.equals("zy")) {
- bw.append(",all");
- } else {
- bw.append("," + c);
- }
- }
- bw.append("\n");
- for (Map.Entry<String, Map<String, String>> e :
- this.bridgeUsersRaw.entrySet()) {
- String longKey = e.getKey();
- String[] parts = longKey.split(",");
- String hashedBridgeIdentity = parts[0];
- if (!this.hashedRelays.contains(hashedBridgeIdentity) &&
- !this.zeroTwoTwoDescriptors.contains(longKey)) {
- Map<String, String> obs = e.getValue();
- StringBuilder sb = new StringBuilder(longKey);
- for (String c : this.countries) {
- sb.append("," + (obs.containsKey(c) &&
- !obs.get(c).startsWith("-") ? obs.get(c) : "NA"));
- }
- String line = sb.toString();
- bw.append(line + "\n");
- }
- }
- bw.close();
- if (!this.bridgeStatsRawTempFile.renameTo(
- this.bridgeStatsRawFile)) {
- this.logger.fine("Failed to rename "
- + this.bridgeStatsRawTempFile.getAbsolutePath() + " to "
- + this.bridgeStatsRawFile.getAbsolutePath() + ".");
- }
- this.logger.fine("Finished writing file "
- + this.bridgeStatsRawFile.getAbsolutePath() + ".");
- } catch (IOException e) {
- this.logger.log(Level.WARNING, "Failed to write "
- + this.bridgeStatsRawFile.getAbsolutePath() + " (using "
- + this.bridgeStatsRawTempFile.getAbsolutePath() + " as temp "
- + "file)!", e);
- }
-
- /* Aggregate per-day statistics. */
- SortedMap<String, double[]> bridgeUsersPerDay =
- new TreeMap<String, double[]>();
- for (Map.Entry<String, Map<String, String>> e :
- this.bridgeUsersRaw.entrySet()) {
- String longKey = e.getKey();
- String[] parts = longKey.split(",");
- String hashedBridgeIdentity = parts[0];
- String date = parts[1];
- if (!this.hashedRelays.contains(hashedBridgeIdentity) &&
- !this.zeroTwoTwoDescriptors.contains(longKey)) {
- double[] users = bridgeUsersPerDay.get(date);
- Map<String, String> obs = e.getValue();
- if (users == null) {
- users = new double[this.countries.size()];
- bridgeUsersPerDay.put(date, users);
- }
- int i = 0;
- for (String c : this.countries) {
- if (obs.containsKey(c) && !obs.get(c).startsWith("-")) {
- users[i] += Double.parseDouble(obs.get(c));
- }
- i++;
- }
- }
- }
-
- /* Write final results of bridge users per day and country to
- * <code>stats/bridge-stats</code>. */
- try {
- this.logger.fine("Writing file "
- + this.bridgeStatsRawFile.getAbsolutePath() + "...");
- this.bridgeStatsFile.getParentFile().mkdirs();
- BufferedWriter bw = new BufferedWriter(new FileWriter(
- this.bridgeStatsFile));
- bw.append("date");
- for (String c : this.countries) {
- if (c.equals("zy")) {
- bw.append(",all");
- } else {
- bw.append("," + c);
- }
- }
- bw.append("\n");
-
- /* Write current observation. */
- for (Map.Entry<String, double[]> e : bridgeUsersPerDay.entrySet()) {
- String date = e.getKey();
- bw.append(date);
- double[] users = e.getValue();
- for (int i = 0; i < users.length; i++) {
- bw.append("," + String.format("%.2f", users[i]));
- }
- bw.append("\n");
- }
- bw.close();
- this.logger.fine("Finished writing file "
- + this.bridgeStatsFile.getAbsolutePath() + ".");
- } catch (IOException e) {
- this.logger.log(Level.WARNING, "Failed to write "
- + this.bridgeStatsFile.getAbsolutePath() + "!", e);
- }
-
- /* Add daily bridge users to database. */
- if (connectionURL != null) {
- try {
- List<String> countryList = new ArrayList<String>();
- for (String c : this.countries) {
- countryList.add(c);
- }
- Map<String, Integer> insertRows = new HashMap<String, Integer>(),
- updateRows = new HashMap<String, Integer>();
- for (Map.Entry<String, double[]> e :
- bridgeUsersPerDay.entrySet()) {
- String date = e.getKey();
- double[] users = e.getValue();
- for (int i = 0; i < users.length; i++) {
- int usersInt = (int) users[i];
- if (usersInt < 1) {
- continue;
- }
- String country = countryList.get(i);
- String key = date + "," + country;
- insertRows.put(key, usersInt);
- }
- }
- Connection conn = DriverManager.getConnection(connectionURL);
- conn.setAutoCommit(false);
- Statement statement = conn.createStatement();
- ResultSet rs = statement.executeQuery(
- "SELECT date, country, users FROM bridge_stats");
- while (rs.next()) {
- String date = rs.getDate(1).toString();
- String country = rs.getString(2);
- String key = date + "," + country;
- if (insertRows.containsKey(key)) {
- int insertRow = insertRows.remove(key);
- int oldUsers = rs.getInt(3);
- if (oldUsers != insertRow) {
- updateRows.put(key, insertRow);
- }
- }
- }
- rs.close();
- PreparedStatement psU = conn.prepareStatement(
- "UPDATE bridge_stats SET users = ? "
- + "WHERE date = ? AND country = ?");
- for (Map.Entry<String, Integer> e : updateRows.entrySet()) {
- String[] keyParts = e.getKey().split(",");
- java.sql.Date date = java.sql.Date.valueOf(keyParts[0]);
- String country = keyParts[1];
- int users = e.getValue();
- psU.clearParameters();
- psU.setInt(1, users);
- psU.setDate(2, date);
- psU.setString(3, country);
- psU.executeUpdate();
- }
- PreparedStatement psI = conn.prepareStatement(
- "INSERT INTO bridge_stats (users, date, country) "
- + "VALUES (?, ?, ?)");
- for (Map.Entry<String, Integer> e : insertRows.entrySet()) {
- String[] keyParts = e.getKey().split(",");
- java.sql.Date date = java.sql.Date.valueOf(keyParts[0]);
- String country = keyParts[1];
- int users = e.getValue();
- psI.clearParameters();
- psI.setInt(1, users);
- psI.setDate(2, date);
- psI.setString(3, country);
- psI.executeUpdate();
- }
- conn.commit();
- conn.close();
- } catch (SQLException e) {
- logger.log(Level.WARNING, "Failed to add daily bridge users to "
- + "database.", e);
- }
- }
- }
-}
-
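The user estimation deleted above (and re-added under cron/users/) normalizes noisy unique-IP counts to a 24-hour period in addExtraInfoDescriptor(). A minimal standalone sketch of that step with a worked example (the method name is hypothetical; the -4 offset and scaling match the code above):

    /* Example: 52 unique IPs seen over a 12-hour (43,200 s) geoip-stats
     * interval yield (52 - 4) * 86400 / 43200 = 96.00 users per day. */
    static String normalizeToDay(int uniqueIps, long intervalSeconds) {
      double users = ((double) uniqueIps - 4) * 86400.0D
          / ((double) intervalSeconds);
      return String.format("%.2f", users);
    }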
diff --git a/src/org/torproject/ernie/cron/ConsensusStatsFileHandler.java b/src/org/torproject/ernie/cron/ConsensusStatsFileHandler.java
deleted file mode 100644
index 934401b..0000000
--- a/src/org/torproject/ernie/cron/ConsensusStatsFileHandler.java
+++ /dev/null
@@ -1,380 +0,0 @@
-/* Copyright 2011, 2012 The Tor Project
- * See LICENSE for licensing information */
-package org.torproject.ernie.cron;
-
-import java.io.BufferedReader;
-import java.io.BufferedWriter;
-import java.io.File;
-import java.io.FileReader;
-import java.io.FileWriter;
-import java.io.IOException;
-import java.sql.Connection;
-import java.sql.DriverManager;
-import java.sql.PreparedStatement;
-import java.sql.ResultSet;
-import java.sql.SQLException;
-import java.sql.Statement;
-import java.text.ParseException;
-import java.text.SimpleDateFormat;
-import java.util.HashMap;
-import java.util.Iterator;
-import java.util.Map;
-import java.util.SortedMap;
-import java.util.TimeZone;
-import java.util.TreeMap;
-import java.util.logging.Level;
-import java.util.logging.Logger;
-
-import org.torproject.descriptor.BridgeNetworkStatus;
-import org.torproject.descriptor.Descriptor;
-import org.torproject.descriptor.DescriptorFile;
-import org.torproject.descriptor.DescriptorReader;
-import org.torproject.descriptor.DescriptorSourceFactory;
-import org.torproject.descriptor.NetworkStatusEntry;
-
-/**
- * Generates statistics on the average number of relays and bridges per
- * day. Accepts parse results from <code>RelayDescriptorParser</code> and
- * <code>BridgeDescriptorParser</code> and stores them in intermediate
- * result files <code>stats/consensus-stats-raw</code> and
- * <code>stats/bridge-consensus-stats-raw</code>. Writes final results to
- * <code>stats/consensus-stats</code> for all days for which at least half
- * of the expected consensuses or statuses are known.
- */
-public class ConsensusStatsFileHandler {
-
- /**
- * Intermediate results file holding the number of running bridges per
- * bridge status.
- */
- private File bridgeConsensusStatsRawFile;
-
- /**
- * Number of running bridges in a given bridge status. Map keys are
- * bridge status times formatted as "yyyy-MM-dd HH:mm:ss", map values
- * are lines as read from <code>stats/bridge-consensus-stats-raw</code>.
- */
- private SortedMap<String, String> bridgesRaw;
-
- /**
- * Average number of running bridges per day. Map keys are dates
- * formatted as "yyyy-MM-dd", map values are the last column as written
- * to <code>stats/consensus-stats</code>.
- */
- private SortedMap<String, String> bridgesPerDay;
-
- /**
- * Logger for this class.
- */
- private Logger logger;
-
- private int bridgeResultsAdded = 0;
-
- /* Database connection string. */
- private String connectionURL = null;
-
- private SimpleDateFormat dateTimeFormat;
-
- private File bridgesDir;
-
- private File statsDirectory;
-
- private boolean keepImportHistory;
-
- /**
- * Initializes this class, including reading in intermediate results
- * files <code>stats/consensus-stats-raw</code> and
- * <code>stats/bridge-consensus-stats-raw</code> and final results file
- * <code>stats/consensus-stats</code>.
- */
- public ConsensusStatsFileHandler(String connectionURL,
- File bridgesDir, File statsDirectory,
- boolean keepImportHistory) {
-
- if (bridgesDir == null || statsDirectory == null) {
- throw new IllegalArgumentException();
- }
- this.bridgesDir = bridgesDir;
- this.statsDirectory = statsDirectory;
- this.keepImportHistory = keepImportHistory;
-
- /* Initialize local data structures to hold intermediate and final
- * results. */
- this.bridgesPerDay = new TreeMap<String, String>();
- this.bridgesRaw = new TreeMap<String, String>();
-
- /* Initialize file names for intermediate and final results files. */
- this.bridgeConsensusStatsRawFile = new File(
- "stats/bridge-consensus-stats-raw");
-
- /* Initialize database connection string. */
- this.connectionURL = connectionURL;
-
- this.dateTimeFormat = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
- this.dateTimeFormat.setTimeZone(TimeZone.getTimeZone("UTC"));
-
- /* Initialize logger. */
- this.logger = Logger.getLogger(
- ConsensusStatsFileHandler.class.getName());
-
- /* Read in number of running bridges per bridge status. */
- if (this.bridgeConsensusStatsRawFile.exists()) {
- try {
- this.logger.fine("Reading file "
- + this.bridgeConsensusStatsRawFile.getAbsolutePath() + "...");
- BufferedReader br = new BufferedReader(new FileReader(
- this.bridgeConsensusStatsRawFile));
- String line = null;
- while ((line = br.readLine()) != null) {
- if (line.startsWith("date")) {
- /* Skip headers. */
- continue;
- }
- String[] parts = line.split(",");
- String dateTime = parts[0];
- if (parts.length == 2) {
- this.bridgesRaw.put(dateTime, line + ",0");
- } else if (parts.length == 3) {
- this.bridgesRaw.put(dateTime, line);
- } else {
- this.logger.warning("Corrupt line '" + line + "' in file "
- + this.bridgeConsensusStatsRawFile.getAbsolutePath()
- + "! Aborting to read this file!");
- break;
- }
- }
- br.close();
- this.logger.fine("Finished reading file "
- + this.bridgeConsensusStatsRawFile.getAbsolutePath() + ".");
- } catch (IOException e) {
- this.logger.log(Level.WARNING, "Failed to read file "
- + this.bridgeConsensusStatsRawFile.getAbsolutePath() + "!",
- e);
- }
- }
- }
-
- /**
- * Adds the intermediate results of the number of running bridges in a
- * given bridge status to the existing observations.
- */
- public void addBridgeConsensusResults(long publishedMillis, int running,
- int runningEc2Bridges) {
- String published = dateTimeFormat.format(publishedMillis);
- String line = published + "," + running + "," + runningEc2Bridges;
- if (!this.bridgesRaw.containsKey(published)) {
- this.logger.finer("Adding new bridge numbers: " + line);
- this.bridgesRaw.put(published, line);
- this.bridgeResultsAdded++;
- } else if (!line.equals(this.bridgesRaw.get(published))) {
- this.logger.warning("The numbers of running bridges we were just "
- + "given (" + line + ") are different from what we learned "
- + "before (" + this.bridgesRaw.get(published) + ")! "
- + "Overwriting!");
- this.bridgesRaw.put(published, line);
- }
- }
-
- public void importSanitizedBridges() {
- if (bridgesDir.exists()) {
- logger.fine("Importing files in directory " + bridgesDir + "/...");
- DescriptorReader reader =
- DescriptorSourceFactory.createDescriptorReader();
- reader.addDirectory(bridgesDir);
- if (keepImportHistory) {
- reader.setExcludeFiles(new File(statsDirectory,
- "consensus-stats-bridge-descriptor-history"));
- }
- Iterator<DescriptorFile> descriptorFiles = reader.readDescriptors();
- while (descriptorFiles.hasNext()) {
- DescriptorFile descriptorFile = descriptorFiles.next();
- if (descriptorFile.getDescriptors() != null) {
- for (Descriptor descriptor : descriptorFile.getDescriptors()) {
- if (descriptor instanceof BridgeNetworkStatus) {
- this.addBridgeNetworkStatus(
- (BridgeNetworkStatus) descriptor);
- }
- }
- }
- }
- logger.info("Finished importing bridge descriptors.");
- }
- }
-
- private void addBridgeNetworkStatus(BridgeNetworkStatus status) {
- int runningBridges = 0, runningEc2Bridges = 0;
- for (NetworkStatusEntry statusEntry :
- status.getStatusEntries().values()) {
- if (statusEntry.getFlags().contains("Running")) {
- runningBridges++;
- if (statusEntry.getNickname().startsWith("ec2bridge")) {
- runningEc2Bridges++;
- }
- }
- }
- this.addBridgeConsensusResults(status.getPublishedMillis(),
- runningBridges, runningEc2Bridges);
- }
-
- /**
- * Aggregates the raw observations on relay and bridge numbers and
- * writes both raw and aggregate observations to disk.
- */
- public void writeFiles() {
-
- /* Go through raw observations of numbers of running bridges in bridge
- * statuses, calculate averages per day, and add these averages to
- * final results. */
- if (!this.bridgesRaw.isEmpty()) {
- String tempDate = null;
- int brunning = 0, brunningEc2 = 0, statuses = 0;
- Iterator<String> it = this.bridgesRaw.values().iterator();
- boolean haveWrittenFinalLine = false;
- while (it.hasNext() || !haveWrittenFinalLine) {
- String next = it.hasNext() ? it.next() : null;
- /* Finished reading a day or even all lines? */
- if (tempDate != null && (next == null
- || !next.substring(0, 10).equals(tempDate))) {
- /* Only write results if we have seen at least half of all
- * statuses. */
- if (statuses >= 24) {
- String line = "," + (brunning / statuses) + ","
- + (brunningEc2 / statuses);
- /* Are our results new? */
- if (!this.bridgesPerDay.containsKey(tempDate)) {
- this.logger.finer("Adding new average bridge numbers: "
- + tempDate + line);
- this.bridgesPerDay.put(tempDate, line);
- } else if (!line.equals(this.bridgesPerDay.get(tempDate))) {
- this.logger.finer("Replacing existing average bridge "
- + "numbers (" + this.bridgesPerDay.get(tempDate)
- + " with new numbers: " + line);
- this.bridgesPerDay.put(tempDate, line);
- }
- }
- brunning = brunningEc2 = statuses = 0;
- haveWrittenFinalLine = (next == null);
- }
- /* Sum up number of running bridges. */
- if (next != null) {
- tempDate = next.substring(0, 10);
- statuses++;
- String[] parts = next.split(",");
- brunning += Integer.parseInt(parts[1]);
- brunningEc2 += Integer.parseInt(parts[2]);
- }
- }
- }
-
- /* Write raw numbers of running bridges to disk. */
- try {
- this.logger.fine("Writing file "
- + this.bridgeConsensusStatsRawFile.getAbsolutePath() + "...");
- this.bridgeConsensusStatsRawFile.getParentFile().mkdirs();
- BufferedWriter bw = new BufferedWriter(
- new FileWriter(this.bridgeConsensusStatsRawFile));
- bw.append("datetime,brunning,brunningec2\n");
- for (String line : this.bridgesRaw.values()) {
- bw.append(line + "\n");
- }
- bw.close();
- this.logger.fine("Finished writing file "
- + this.bridgeConsensusStatsRawFile.getAbsolutePath() + ".");
- } catch (IOException e) {
- this.logger.log(Level.WARNING, "Failed to write file "
- + this.bridgeConsensusStatsRawFile.getAbsolutePath() + "!",
- e);
- }
-
- /* Add average number of bridges per day to the database. */
- if (connectionURL != null) {
- try {
- Map<String, String> insertRows = new HashMap<String, String>(),
- updateRows = new HashMap<String, String>();
- insertRows.putAll(this.bridgesPerDay);
- Connection conn = DriverManager.getConnection(connectionURL);
- conn.setAutoCommit(false);
- Statement statement = conn.createStatement();
- ResultSet rs = statement.executeQuery(
- "SELECT date, avg_running, avg_running_ec2 "
- + "FROM bridge_network_size");
- while (rs.next()) {
- String date = rs.getDate(1).toString();
- if (insertRows.containsKey(date)) {
- String insertRow = insertRows.remove(date);
- String[] parts = insertRow.substring(1).split(",");
- long newAvgRunning = Long.parseLong(parts[0]);
- long newAvgRunningEc2 = Long.parseLong(parts[1]);
- long oldAvgRunning = rs.getLong(2);
- long oldAvgRunningEc2 = rs.getLong(3);
- if (newAvgRunning != oldAvgRunning ||
- newAvgRunningEc2 != oldAvgRunningEc2) {
- updateRows.put(date, insertRow);
- }
- }
- }
- rs.close();
- PreparedStatement psU = conn.prepareStatement(
- "UPDATE bridge_network_size SET avg_running = ?, "
- + "avg_running_ec2 = ? WHERE date = ?");
- for (Map.Entry<String, String> e : updateRows.entrySet()) {
- java.sql.Date date = java.sql.Date.valueOf(e.getKey());
- String[] parts = e.getValue().substring(1).split(",");
- long avgRunning = Long.parseLong(parts[0]);
- long avgRunningEc2 = Long.parseLong(parts[1]);
- psU.clearParameters();
- psU.setLong(1, avgRunning);
- psU.setLong(2, avgRunningEc2);
- psU.setDate(3, date);
- psU.executeUpdate();
- }
- PreparedStatement psI = conn.prepareStatement(
- "INSERT INTO bridge_network_size (avg_running, "
- + "avg_running_ec2, date) VALUES (?, ?, ?)");
- for (Map.Entry<String, String> e : insertRows.entrySet()) {
- java.sql.Date date = java.sql.Date.valueOf(e.getKey());
- String[] parts = e.getValue().substring(1).split(",");
- long avgRunning = Long.parseLong(parts[0]);
- long avgRunningEc2 = Long.parseLong(parts[1]);
- psI.clearParameters();
- psI.setLong(1, avgRunning);
- psI.setLong(2, avgRunningEc2);
- psI.setDate(3, date);
- psI.executeUpdate();
- }
- conn.commit();
- conn.close();
- } catch (SQLException e) {
- logger.log(Level.WARNING, "Failed to add average bridge numbers "
- + "to database.", e);
- }
- }
-
- /* Write stats. */
- StringBuilder dumpStats = new StringBuilder("Finished writing "
- + "statistics on bridge network statuses to disk.\nAdded "
- + this.bridgeResultsAdded + " bridge network status(es) in this "
- + "execution.");
- long now = System.currentTimeMillis();
- SimpleDateFormat dateTimeFormat =
- new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
- dateTimeFormat.setTimeZone(TimeZone.getTimeZone("UTC"));
- if (this.bridgesRaw.isEmpty()) {
- dumpStats.append("\nNo bridge status known yet.");
- } else {
- dumpStats.append("\nLast known bridge status was published "
- + this.bridgesRaw.lastKey() + ".");
- try {
- if (now - 6L * 60L * 60L * 1000L > dateTimeFormat.parse(
- this.bridgesRaw.lastKey()).getTime()) {
- logger.warning("Last known bridge status is more than 6 hours "
- + "old: " + this.bridgesRaw.lastKey());
- }
- } catch (ParseException e) {
- /* Can't parse the timestamp? Whatever. */
- }
- }
- logger.info(dumpStats.toString());
- }
-}
-
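The aggregation deleted above (and re-added under cron/network/) writes a daily average only once at least half of the expected bridge statuses are known; the statuses >= 24 threshold implies 48 statuses per day, i.e., one every half hour. A minimal sketch of that rule (the method name is hypothetical; the integer division matches the code above):

    /* Returns the ",avgRunning,avgRunningEc2" CSV fragment for one day,
     * or null if fewer than 24 of the expected 48 statuses were seen. */
    static String dailyAverage(int summedRunning, int summedRunningEc2,
        int statuses) {
      if (statuses < 24) {
        return null;
      }
      return "," + (summedRunning / statuses) + ","
          + (summedRunningEc2 / statuses);
    }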
diff --git a/src/org/torproject/ernie/cron/ExoneraTorDatabaseImporter.java b/src/org/torproject/ernie/cron/ExoneraTorDatabaseImporter.java
deleted file mode 100644
index 5d007c2..0000000
--- a/src/org/torproject/ernie/cron/ExoneraTorDatabaseImporter.java
+++ /dev/null
@@ -1,619 +0,0 @@
-/* Copyright 2011, 2012 The Tor Project
- * See LICENSE for licensing information */
-package org.torproject.ernie.cron;
-
-import java.io.BufferedInputStream;
-import java.io.BufferedReader;
-import java.io.BufferedWriter;
-import java.io.ByteArrayOutputStream;
-import java.io.File;
-import java.io.FileInputStream;
-import java.io.FileReader;
-import java.io.FileWriter;
-import java.io.IOException;
-import java.io.StringReader;
-import java.io.UnsupportedEncodingException;
-import java.sql.CallableStatement;
-import java.sql.Connection;
-import java.sql.DriverManager;
-import java.sql.SQLException;
-import java.sql.Timestamp;
-import java.sql.Types;
-import java.text.ParseException;
-import java.text.SimpleDateFormat;
-import java.util.Calendar;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.Map;
-import java.util.Set;
-import java.util.Stack;
-import java.util.TimeZone;
-
-import org.apache.commons.codec.binary.Base64;
-import org.apache.commons.codec.binary.Hex;
-import org.apache.commons.codec.digest.DigestUtils;
-
-/* Import Tor descriptors into the ExoneraTor database. */
-public class ExoneraTorDatabaseImporter {
-
- /* Main function controlling the parsing process. */
- public static void main(String[] args) {
- readConfiguration();
- openDatabaseConnection();
- prepareDatabaseStatements();
- createLockFile();
- readImportHistoryToMemory();
- parseDescriptors();
- writeImportHistoryToDisk();
- closeDatabaseConnection();
- deleteLockFile();
- }
-
- /* JDBC string of the ExoneraTor database. */
- private static String jdbcString;
-
- /* Directory from which to import descriptors. */
- private static String importDirString;
-
- /* Learn JDBC string and directory to parse descriptors from. */
- private static void readConfiguration() {
- File configFile = new File("config");
- if (!configFile.exists()) {
- System.err.println("Could not find config file. Exiting.");
- System.exit(1);
- }
- String line = null;
- try {
- BufferedReader br = new BufferedReader(new FileReader(configFile));
- while ((line = br.readLine()) != null) {
- if (line.startsWith("#") || line.length() < 1) {
- continue;
- } else if (line.startsWith("ExoneraTorDatabaseJdbc")) {
- jdbcString = line.split(" ")[1];
- } else if (line.startsWith("ExoneraTorImportDirectory")) {
- importDirString = line.split(" ")[1];
- } else {
- /* Ignore unrecognized configuration keys. */
- }
- }
- br.close();
- } catch (IOException e) {
- System.err.println("Could not parse config file. Exiting.");
- System.exit(1);
- }
- }
-
- /* Database connection. */
- private static Connection connection;
-
- /* Open a database connection using the JDBC string in the config. */
- private static void openDatabaseConnection() {
- try {
- connection = DriverManager.getConnection(jdbcString);
- } catch (SQLException e) {
- System.out.println("Could not connect to database. Exiting.");
- System.exit(1);
- }
- }
-
- /* Callable statements to import data into the database. */
- private static CallableStatement insertDescriptorStatement;
- private static CallableStatement insertStatusentryStatement;
- private static CallableStatement insertConsensusStatement;
- private static CallableStatement insertExitlistentryStatement;
-
- /* Prepare statements for importing data into the database. */
- private static void prepareDatabaseStatements() {
- try {
- insertDescriptorStatement = connection.prepareCall(
- "{call insert_descriptor(?, ?)}");
- insertStatusentryStatement = connection.prepareCall(
- "{call insert_statusentry(?, ?, ?, ?, ?, ?, ?)}");
- insertConsensusStatement = connection.prepareCall(
- "{call insert_consensus(?, ?)}");
- insertExitlistentryStatement = connection.prepareCall(
- "{call insert_exitlistentry(?, ?, ?, ?, ?)}");
- } catch (SQLException e) {
- System.out.println("Could not prepare callable statements to "
- + "import data into the database. Exiting.");
- System.exit(1);
- }
- }
-
- /* Create a local lock file to prevent other instances of this import
- * tool from running concurrently. */
- private static void createLockFile() {
- File lockFile = new File("exonerator-lock");
- try {
- if (lockFile.exists()) {
- BufferedReader br = new BufferedReader(new FileReader(lockFile));
- long runStarted = Long.parseLong(br.readLine());
- br.close();
- if (System.currentTimeMillis() - runStarted
- < 6L * 60L * 60L * 1000L) {
- System.out.println("File 'exonerator-lock' is less than 6 "
- + "hours old. Exiting.");
- System.exit(1);
- } else {
- System.out.println("File 'exonerator-lock' is at least 6 hours "
- + "old. Overwriting and executing anyway.");
- }
- }
- BufferedWriter bw = new BufferedWriter(new FileWriter(
- "exonerator-lock"));
- bw.append(String.valueOf(System.currentTimeMillis()) + "\n");
- bw.close();
- } catch (IOException e) {
- System.out.println("Could not create 'exonerator-lock' file. "
- + "Exiting.");
- System.exit(1);
- }
- }
-
- /* Last and next parse histories containing paths of parsed files and
- * last modified times. */
- private static Map<String, Long>
- lastImportHistory = new HashMap<String, Long>(),
- nextImportHistory = new HashMap<String, Long>();
-
- /* Read stats/exonerator-import-history file from disk and remember
- * locally when files were last parsed. */
- private static void readImportHistoryToMemory() {
- File parseHistoryFile = new File("stats",
- "exonerator-import-history");
- if (parseHistoryFile.exists()) {
- try {
- BufferedReader br = new BufferedReader(new FileReader(
- parseHistoryFile));
- String line = null;
- int lineNumber = 0;
- while ((line = br.readLine()) != null) {
- lineNumber++;
- String[] parts = line.split(",");
- if (parts.length != 2) {
- System.out.println("File 'stats/exonerator-import-history' "
- + "contains a corrupt entry in line " + lineNumber
- + ". Ignoring parse history file entirely.");
- lastImportHistory.clear();
- br.close();
- return;
- }
- long lastModified = Long.parseLong(parts[0]);
- String filename = parts[1];
- lastImportHistory.put(filename, lastModified);
- }
- br.close();
- } catch (IOException e) {
- System.out.println("Could not read import history. Ignoring.");
- lastImportHistory.clear();
- }
- }
- }
-
- /* Parse descriptors in the import directory and its subdirectories. */
- private static void parseDescriptors() {
- File file = new File(importDirString);
- if (!file.exists()) {
- System.out.println("File or directory " + importDirString + " does "
- + "not exist. Exiting.");
- return;
- }
- Stack<File> files = new Stack<File>();
- files.add(file);
- while (!files.isEmpty()) {
- file = files.pop();
- if (file.isDirectory()) {
- for (File f : file.listFiles()) {
- files.add(f);
- }
- } else {
- parseFile(file);
- }
- }
- }
-
- /* Import a file if it wasn't imported before, and add it to the import
- * history for the next execution. */
- private static void parseFile(File file) {
- long lastModified = file.lastModified();
- String filename = file.getName();
- nextImportHistory.put(filename, lastModified);
- if (!lastImportHistory.containsKey(filename) ||
- lastImportHistory.get(filename) < lastModified) {
- try {
- FileInputStream fis = new FileInputStream(file);
- BufferedInputStream bis = new BufferedInputStream(fis);
- ByteArrayOutputStream baos = new ByteArrayOutputStream();
- int len;
- byte[] bytes = new byte[1024];
- while ((len = bis.read(bytes, 0, 1024)) >= 0) {
- baos.write(bytes, 0, len);
- }
- bis.close();
- byte[] allBytes = baos.toByteArray();
- splitFile(file, allBytes);
- } catch (IOException e) {
- System.out.println("Could not read '" + file + "' to memory. "
- + "Skipping.");
- nextImportHistory.remove(filename);
- }
- }
- }
-
- /* Detect what descriptor type is contained in a file and split it to
- * parse the single descriptors. */
- private static void splitFile(File file, byte[] bytes) {
- try {
- String asciiString = new String(bytes, "US-ASCII");
- BufferedReader br = new BufferedReader(new StringReader(
- asciiString));
- String line = br.readLine();
- while (line != null && line.startsWith("@")) {
- line = br.readLine();
- }
- if (line == null) {
- return;
- }
- br.close();
- String startToken = null;
- if (line.startsWith("router ")) {
- startToken = "router ";
- } else if (line.equals("network-status-version 3")) {
- startToken = "network-status-version 3";
- } else if (line.startsWith("Downloaded ") ||
- line.startsWith("ExitNode ")) {
- startToken = "ExitNode ";
- } else {
- System.out.println("Unknown descriptor type in file '" + file
- + "'. Ignoring.");
- return;
- }
- String splitToken = "\n" + startToken;
- int length = bytes.length, start = asciiString.indexOf(startToken);
- while (start < length) {
- int end = asciiString.indexOf(splitToken, start);
- if (end < 0) {
- end = length;
- } else {
- end += 1;
- }
- byte[] descBytes = new byte[end - start];
- System.arraycopy(bytes, start, descBytes, 0, end - start);
- if (startToken.equals("router ")) {
- parseServerDescriptor(file, descBytes);
- } else if (startToken.equals("network-status-version 3")) {
- parseConsensus(file, descBytes);
- } else if (startToken.equals("ExitNode ")) {
- parseExitList(file, descBytes);
- }
- start = end;
- }
- } catch (IOException e) {
- System.out.println("Could not parse descriptor '" + file + "'. "
- + "Skipping.");
- }
- }
-
- /* Date format to parse UTC timestamps. */
- private static SimpleDateFormat parseFormat;
- static {
- parseFormat = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
- parseFormat.setTimeZone(TimeZone.getTimeZone("UTC"));
- }
-
- /* Parse a single server descriptor. */
- private static void parseServerDescriptor(File file, byte[] bytes) {
- String ascii = "";
- try {
- ascii = new String(bytes, "US-ASCII");
- } catch (UnsupportedEncodingException e) {
- /* We know that US-ASCII is a supported encoding. */
- }
- String startToken = "router ";
- String sigToken = "\nrouter-signature\n";
- int start = ascii.indexOf(startToken);
- int sig = ascii.indexOf(sigToken) + sigToken.length();
- String descriptor = null;
- if (start >= 0 && sig >= 0 && sig > start) {
- byte[] forDigest = new byte[sig - start];
- System.arraycopy(bytes, start, forDigest, 0, sig - start);
- descriptor = DigestUtils.shaHex(forDigest);
- }
- if (descriptor == null) {
- System.out.println("Could not calculate descriptor digest. "
- + "Skipping.");
- return;
- }
- importDescriptor(descriptor, bytes);
- }
-
- /* Import a single server descriptor into the database. */
- private static void importDescriptor(String descriptor,
- byte[] rawDescriptor) {
- try {
- insertDescriptorStatement.clearParameters();
- insertDescriptorStatement.setString(1, descriptor);
- insertDescriptorStatement.setBytes(2, rawDescriptor);
- insertDescriptorStatement.execute();
- } catch (SQLException e) {
- System.out.println("Could not import descriptor into the "
- + "database. Exiting.");
- System.exit(1);
- }
- }
-
- /* Parse a consensus. */
- private static void parseConsensus(File file, byte[] bytes) {
- try {
- BufferedReader br = new BufferedReader(new StringReader(new String(
- bytes, "US-ASCII")));
- String line, fingerprint = null, descriptor = null;
- Set<String> orAddresses = new HashSet<String>();
- long validAfterMillis = -1L;
- StringBuilder rawStatusentryBuilder = null;
- boolean isRunning = false;
- while ((line = br.readLine()) != null) {
- if (line.startsWith("vote-status ") &&
- !line.equals("vote-status consensus")) {
- System.out.println("File '" + file + "' contains network status "
- + "*votes*, not network status *consensuses*. Skipping.");
- return;
- } else if (line.startsWith("valid-after ")) {
- String validAfterTime = line.substring("valid-after ".length());
- try {
- validAfterMillis = parseFormat.parse(validAfterTime).
- getTime();
- } catch (ParseException e) {
- System.out.println("Could not parse valid-after timestamp in "
- + "'" + file + "'. Skipping.");
- return;
- }
- importConsensus(validAfterMillis, bytes);
- } else if (line.startsWith("r ") ||
- line.equals("directory-footer")) {
- if (isRunning) {
- byte[] rawStatusentry = rawStatusentryBuilder.toString().
- getBytes();
- importStatusentry(validAfterMillis, fingerprint, descriptor,
- orAddresses, rawStatusentry);
- orAddresses = new HashSet<String>();
- }
- if (line.equals("directory-footer")) {
- return;
- }
- rawStatusentryBuilder = new StringBuilder(line + "\n");
- String[] parts = line.split(" ");
- if (parts.length < 9) {
- System.out.println("Could not parse r line '" + line
- + "'. Skipping.");
- return;
- }
- fingerprint = Hex.encodeHexString(Base64.decodeBase64(parts[2]
- + "=")).toLowerCase();
- descriptor = Hex.encodeHexString(Base64.decodeBase64(parts[3]
- + "=")).toLowerCase();
- orAddresses.add(parts[6]);
- } else if (line.startsWith("a ")) {
- rawStatusentryBuilder.append(line + "\n");
- orAddresses.add(line.substring("a ".length(),
- line.lastIndexOf(":")));
- } else if (line.startsWith("s ") || line.equals("s")) {
- rawStatusentryBuilder.append(line + "\n");
- isRunning = line.contains(" Running");
- } else if (rawStatusentryBuilder != null) {
- rawStatusentryBuilder.append(line + "\n");
- }
- }
- } catch (IOException e) {
- System.out.println("Could not parse consensus. Skipping.");
- return;
- }
- }
-
- /* UTC calendar for importing timestamps into the database. */
- private static Calendar calendarUTC = Calendar.getInstance(
- TimeZone.getTimeZone("UTC"));
-
- /* Import a status entry with one or more OR addresses into the
- * database. */
- private static void importStatusentry(long validAfterMillis,
- String fingerprint, String descriptor, Set<String> orAddresses,
- byte[] rawStatusentry) {
- try {
- for (String orAddress : orAddresses) {
- insertStatusentryStatement.clearParameters();
- insertStatusentryStatement.setTimestamp(1,
- new Timestamp(validAfterMillis), calendarUTC);
- insertStatusentryStatement.setString(2, fingerprint);
- insertStatusentryStatement.setString(3, descriptor);
- if (!orAddress.contains(":")) {
- String[] addressParts = orAddress.split("\\.");
- byte[] address24Bytes = new byte[3];
- address24Bytes[0] = (byte) Integer.parseInt(addressParts[0]);
- address24Bytes[1] = (byte) Integer.parseInt(addressParts[1]);
- address24Bytes[2] = (byte) Integer.parseInt(addressParts[2]);
- String orAddress24 = Hex.encodeHexString(address24Bytes);
- insertStatusentryStatement.setString(4, orAddress24);
- insertStatusentryStatement.setNull(5, Types.VARCHAR);
- insertStatusentryStatement.setString(6, orAddress);
- } else {
- StringBuilder addressHex = new StringBuilder();
- int start = orAddress.startsWith("[::") ? 2 : 1;
- int end = orAddress.length()
- - (orAddress.endsWith("::]") ? 2 : 1);
- String[] parts = orAddress.substring(start, end).split(":", -1);
- for (int i = 0; i < parts.length; i++) {
- String part = parts[i];
- if (part.length() == 0) {
- addressHex.append("x");
- } else if (part.length() <= 4) {
- addressHex.append(String.format("%4s", part));
- } else {
- addressHex = null;
- break;
- }
- }
- String orAddress48 = null;
- if (addressHex != null) {
- String addressHexString = addressHex.toString();
- addressHexString = addressHexString.replaceFirst("x",
- String.format("%" + (33 - addressHexString.length())
- + "s", "0"));
- if (!addressHexString.contains("x") &&
- addressHexString.length() == 32) {
- orAddress48 = addressHexString.replaceAll(" ", "0").
- toLowerCase().substring(0, 12);
- }
- }
- if (orAddress48 != null) {
- insertStatusentryStatement.setNull(4, Types.VARCHAR);
- insertStatusentryStatement.setString(5, orAddress48);
- insertStatusentryStatement.setString(6,
- orAddress.replaceAll("[\\[\\]]", ""));
- } else {
- System.err.println("Could not import status entry with IPv6 "
- + "address '" + orAddress + "'. Exiting.");
- System.exit(1);
- }
- }
- insertStatusentryStatement.setBytes(7, rawStatusentry);
- insertStatusentryStatement.execute();
- }
- } catch (SQLException e) {
- System.out.println("Could not import status entry. Exiting.");
- System.exit(1);
- }
- }
-
- /* Import a consensus into the database. */
- private static void importConsensus(long validAfterMillis,
- byte[] rawConsensus) {
- try {
- insertConsensusStatement.clearParameters();
- insertConsensusStatement.setTimestamp(1,
- new Timestamp(validAfterMillis), calendarUTC);
- insertConsensusStatement.setBytes(2, rawConsensus);
- insertConsensusStatement.execute();
- } catch (SQLException e) {
- System.out.println("Could not import consensus. Exiting.");
- System.exit(1);
- }
- }
-
- /* Parse an exit list. */
- private static void parseExitList(File file, byte[] bytes) {
- try {
- BufferedReader br = new BufferedReader(new StringReader(new String(
- bytes, "US-ASCII")));
- String fingerprint = null;
- Set<String> exitAddressLines = new HashSet<String>();
- StringBuilder rawExitlistentryBuilder = new StringBuilder();
- while (true) {
- String line = br.readLine();
- if ((line == null || line.startsWith("ExitNode ")) &&
- fingerprint != null) {
- for (String exitAddressLine : exitAddressLines) {
- String[] parts = exitAddressLine.split(" ");
- String exitAddress = parts[1];
- /* TODO Extend the following code for IPv6 once the exit list
- * format supports it. */
- String[] exitAddressParts = exitAddress.split("\\.");
- byte[] exitAddress24Bytes = new byte[3];
- exitAddress24Bytes[0] = (byte) Integer.parseInt(
- exitAddressParts[0]);
- exitAddress24Bytes[1] = (byte) Integer.parseInt(
- exitAddressParts[1]);
- exitAddress24Bytes[2] = (byte) Integer.parseInt(
- exitAddressParts[2]);
- String exitAddress24 = Hex.encodeHexString(
- exitAddress24Bytes);
- String scannedTime = parts[2] + " " + parts[3];
- long scannedMillis = -1L;
- try {
- scannedMillis = parseFormat.parse(scannedTime).getTime();
- } catch (ParseException e) {
- System.out.println("Could not parse timestamp in "
- + "'" + file + "'. Skipping.");
- return;
- }
- byte[] rawExitlistentry = rawExitlistentryBuilder.toString().
- getBytes();
- importExitlistentry(fingerprint, exitAddress24, exitAddress,
- scannedMillis, rawExitlistentry);
- }
- exitAddressLines.clear();
- rawExitlistentryBuilder = new StringBuilder();
- }
- if (line == null) {
- break;
- }
- rawExitlistentryBuilder.append(line + "\n");
- if (line.startsWith("ExitNode ")) {
- fingerprint = line.substring("ExitNode ".length()).
- toLowerCase();
- } else if (line.startsWith("ExitAddress ")) {
- exitAddressLines.add(line);
- }
- }
- br.close();
- } catch (IOException e) {
- System.out.println("Could not parse exit list. Skipping.");
- return;
- }
- }
-
- /* Import an exit list entry into the database. */
- private static void importExitlistentry(String fingerprint,
- String exitAddress24, String exitAddress, long scannedMillis,
- byte[] rawExitlistentry) {
- try {
- insertExitlistentryStatement.clearParameters();
- insertExitlistentryStatement.setString(1, fingerprint);
- insertExitlistentryStatement.setString(2, exitAddress24);
- insertExitlistentryStatement.setString(3, exitAddress);
- insertExitlistentryStatement.setTimestamp(4,
- new Timestamp(scannedMillis), calendarUTC);
- insertExitlistentryStatement.setBytes(5, rawExitlistentry);
- insertExitlistentryStatement.execute();
- } catch (SQLException e) {
- System.out.println("Could not import exit list entry. Exiting.");
- System.exit(1);
- }
- }
-
- /* Write parse history from memory to disk for the next execution. */
- private static void writeImportHistoryToDisk() {
- File parseHistoryFile = new File("stats/exonerator-import-history");
- parseHistoryFile.getParentFile().mkdirs();
- try {
- BufferedWriter bw = new BufferedWriter(new FileWriter(
- parseHistoryFile));
- for (Map.Entry<String, Long> historyEntry :
- nextImportHistory.entrySet()) {
- bw.write(String.valueOf(historyEntry.getValue()) + ","
- + historyEntry.getKey() + "\n");
- }
- bw.close();
- } catch (IOException e) {
- System.out.println("File 'stats/exonerator-import-history' could "
- + "not be written. Ignoring.");
- }
- }
-
- /* Close the database connection. */
- private static void closeDatabaseConnection() {
- try {
- connection.close();
- } catch (SQLException e) {
- System.out.println("Could not close database connection. "
- + "Ignoring.");
- }
- }
-
-  /* Delete the exonerator-lock file to allow the next execution of this
-   * tool. */
- private static void deleteLockFile() {
- new File("exonerator-lock").delete();
- }
-}
-
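splitFile() above cuts a concatenated descriptor file at every occurrence of the start token. A self-contained sketch of that splitting step (class name and sample input are made up for illustration):

    import java.util.ArrayList;
    import java.util.List;

    public class DescriptorSplitter {

      /* Split a concatenation of descriptors at each line beginning
       * with the given start token. */
      static List<String> split(String ascii, String startToken) {
        List<String> descriptors = new ArrayList<String>();
        String splitToken = "\n" + startToken;
        int length = ascii.length();
        int start = ascii.indexOf(startToken);
        while (start >= 0 && start < length) {
          int end = ascii.indexOf(splitToken, start);
          end = end < 0 ? length : end + 1;  /* keep the newline */
          descriptors.add(ascii.substring(start, end));
          start = end;
        }
        return descriptors;
      }

      public static void main(String[] args) {
        String two = "router A\nfoo\nrouter B\nbar\n";
        /* Prints 2, one entry per "router " block. */
        System.out.println(split(two, "router ").size());
      }
    }
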
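importStatusentry() above reduces each IPv6 OR address to the hex-encoded first 48 bits before storing it. A standalone sketch of that normalization under the same rules (class and method names are illustrative, not from the repository):

    import java.util.ArrayList;
    import java.util.Arrays;
    import java.util.List;

    public class Ipv6Prefix48 {

      /* Reduce a bracketed IPv6 OR address such as "[2001:db8::1]"
       * to the hex-encoded first 48 bits, or null if malformed. */
      static String toOrAddress48(String orAddress) {
        String stripped = orAddress.replaceAll("[\\[\\]]", "");
        String[] halves = stripped.split("::", -1);
        List<String> groups = new ArrayList<String>(Arrays.asList(
            halves[0].isEmpty() ? new String[0]
            : halves[0].split(":")));
        if (halves.length == 2) {
          String[] tail = halves[1].isEmpty() ? new String[0]
              : halves[1].split(":");
          for (int i = groups.size() + tail.length; i < 8; i++) {
            groups.add("0");    /* groups elided by "::" */
          }
          groups.addAll(Arrays.asList(tail));
        }
        if (groups.size() != 8) {
          return null;
        }
        StringBuilder hex = new StringBuilder();
        for (String group : groups) {
          hex.append(String.format("%4s", group).replace(' ', '0'));
        }
        return hex.toString().toLowerCase().substring(0, 12);
      }

      public static void main(String[] args) {
        /* Prints "20010db80000", the /48 of 2001:db8::1. */
        System.out.println(toOrAddress48("[2001:db8::1]"));
      }
    }
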
diff --git a/src/org/torproject/ernie/cron/GeoipDatabaseImporter.java b/src/org/torproject/ernie/cron/GeoipDatabaseImporter.java
deleted file mode 100644
index cc9e140..0000000
--- a/src/org/torproject/ernie/cron/GeoipDatabaseImporter.java
+++ /dev/null
@@ -1,103 +0,0 @@
-/* Copyright 2011, 2012 The Tor Project
- * See LICENSE for licensing information */
-package org.torproject.ernie.cron;
-
-import java.io.BufferedReader;
-import java.io.File;
-import java.io.FileReader;
-import java.io.IOException;
-import java.sql.Connection;
-import java.sql.DriverManager;
-import java.sql.PreparedStatement;
-import java.sql.SQLException;
-import java.sql.Statement;
-import java.sql.Types;
-import java.util.HashMap;
-import java.util.Map;
-
-/**
- * Import a Maxmind GeoLite City database to resolve IP addresses to
- * country codes, latitudes, and longitudes.
- */
-public class GeoipDatabaseImporter {
- public static void main(String[] args) throws IOException,
- SQLException {
-
- /* Check if the GeoIP database files are in place. */
- File locationsFile = new File("GeoLiteCity-Location.csv"),
- blocksFile = new File("GeoLiteCity-Blocks.csv");
- if (!locationsFile.exists() || !blocksFile.exists()) {
- System.out.println("Could not find GeoLiteCity-Location.csv and/or "
- + "GeoLiteCity-Blocks.csv in the working directory! Exiting!");
- System.exit(1);
- }
-
- /* Initialize configuration to learn JDBC string. */
- Configuration config = new Configuration();
- String jdbcString = config.getRelayDescriptorDatabaseJDBC();
-
- /* Connect to database. */
- Connection c = DriverManager.getConnection(jdbcString);
-
- /* Start by reading location information to memory. */
- BufferedReader br = new BufferedReader(new FileReader(locationsFile));
- String line;
- Map<Integer, String> locations = new HashMap<Integer, String>();
- while ((line = br.readLine()) != null) {
- if (line.startsWith("Copyright") || line.startsWith("locId")) {
- continue;
- }
- String[] parts = line.split(",");
- int locId = Integer.parseInt(parts[0]);
- String country = parts[1].replaceAll("\"", "");
- String latitude = parts[5];
- String longitude = parts[6];
- locations.put(locId, country + "," + latitude + "," + longitude);
- }
- br.close();
-
- /* Parse block information and add it to the database together with
- * the location information. */
- PreparedStatement ps = c.prepareStatement("INSERT INTO geoipdb "
- + "(ipstart, ipend, country, latitude, longitude) VALUES "
- + "(?, ?, ?, ?, ?)");
- Statement s = c.createStatement();
- s.execute("DELETE FROM geoipdb");
- /* TODO The import takes 30+ minutes. Perform the import in a single
- * transaction, or requests will return strange results in these 30+
- * minutes. */
- br = new BufferedReader(new FileReader(blocksFile));
- while ((line = br.readLine()) != null) {
- if (line.startsWith("Copyright") ||
- line.startsWith("startIpNum")) {
- continue;
- }
- String[] parts = line.replaceAll("\"", "").split(",");
- long startIpNum = Long.parseLong(parts[0]);
- String startIp = "" + startIpNum / 256 / 256 / 256 + "."
- + startIpNum / 256 / 256 % 256 + "." + startIpNum / 256 % 256
- + "." + startIpNum % 256;
- long endIpNum = Long.parseLong(parts[1]);
- String endIp = "" + endIpNum / 256 / 256 / 256 + "."
- + endIpNum / 256 / 256 % 256 + "." + endIpNum / 256 % 256 + "."
- + endIpNum % 256;
- int locId = Integer.parseInt(parts[2]);
- if (!locations.containsKey(locId)) {
- System.out.println("Cannot find locId=" + locId
- + " in locations file!");
- continue;
- }
- String[] locationParts = locations.get(locId).split(",");
- String country = locationParts[0];
- double latitude = Double.parseDouble(locationParts[1]);
- double longitude = Double.parseDouble(locationParts[2]);
- ps.setObject(1, startIp, Types.OTHER);
- ps.setObject(2, endIp, Types.OTHER);
- ps.setString(3, country);
- ps.setDouble(4, latitude);
- ps.setDouble(5, longitude);
- ps.execute();
- }
- }
-}
-
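The block import above converts GeoLite's numeric IP range bounds to dotted quads by repeated division and modulo. The same conversion written with bit shifts, as a small self-contained sketch (the class name is made up):

    public class IpNumToQuad {

      /* Equivalent to ipNum / 256 / 256 / 256 + "." + ... above. */
      static String toDottedQuad(long ipNum) {
        return (ipNum >> 24 & 255) + "." + (ipNum >> 16 & 255) + "."
            + (ipNum >> 8 & 255) + "." + (ipNum & 255);
      }

      public static void main(String[] args) {
        /* 3232235520 is 192 * 2^24 + 168 * 2^16; prints
         * "192.168.0.0". */
        System.out.println(toDottedQuad(3232235520L));
      }
    }
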
diff --git a/src/org/torproject/ernie/cron/Main.java b/src/org/torproject/ernie/cron/Main.java
index fb4a450..c0eb435 100644
--- a/src/org/torproject/ernie/cron/Main.java
+++ b/src/org/torproject/ernie/cron/Main.java
@@ -5,6 +5,11 @@ package org.torproject.ernie.cron;
import java.io.File;
import java.util.logging.Logger;
+import org.torproject.ernie.cron.network.ConsensusStatsFileHandler;
+import org.torproject.ernie.cron.performance.PerformanceStatsImporter;
+import org.torproject.ernie.cron.performance.TorperfProcessor;
+import org.torproject.ernie.cron.users.BridgeStatsFileHandler;
+
/**
* Coordinate downloading and parsing of descriptors and extraction of
* statistically relevant data for later processing with R.
diff --git a/src/org/torproject/ernie/cron/PerformanceStatsImporter.java b/src/org/torproject/ernie/cron/PerformanceStatsImporter.java
deleted file mode 100644
index 105426b..0000000
--- a/src/org/torproject/ernie/cron/PerformanceStatsImporter.java
+++ /dev/null
@@ -1,271 +0,0 @@
-/* Copyright 2012 The Tor Project
- * See LICENSE for licensing information */
-package org.torproject.ernie.cron;
-
-import java.io.BufferedWriter;
-import java.io.File;
-import java.io.FileWriter;
-import java.io.IOException;
-import java.sql.Connection;
-import java.sql.DriverManager;
-import java.sql.PreparedStatement;
-import java.sql.ResultSet;
-import java.sql.SQLException;
-import java.sql.Timestamp;
-import java.text.SimpleDateFormat;
-import java.util.Calendar;
-import java.util.Iterator;
-import java.util.TimeZone;
-import java.util.logging.Level;
-import java.util.logging.Logger;
-
-import org.torproject.descriptor.Descriptor;
-import org.torproject.descriptor.DescriptorFile;
-import org.torproject.descriptor.DescriptorReader;
-import org.torproject.descriptor.DescriptorSourceFactory;
-import org.torproject.descriptor.ExtraInfoDescriptor;
-
-public class PerformanceStatsImporter {
-
- /**
- * How many records to commit with each database transaction.
- */
- private final long autoCommitCount = 500;
-
-  /**
-   * Number of records inserted so far; used to commit a transaction
-   * after every <code>autoCommitCount</code> records.
-   */
- private int rbsCount = 0;
-
- /**
- * Relay descriptor database connection.
- */
- private Connection conn;
-
- /**
- * Prepared statement to check whether a given conn-bi-direct stats
- * string has been imported into the database before.
- */
- private PreparedStatement psBs;
-
- /**
- * Prepared statement to insert a conn-bi-direct stats string into the
- * database.
- */
- private PreparedStatement psB;
-
- /**
- * Logger for this class.
- */
- private Logger logger;
-
- /**
- * Directory for writing raw import files.
- */
- private String rawFilesDirectory;
-
- /**
- * Raw import file containing conn-bi-direct stats strings.
- */
- private BufferedWriter connBiDirectOut;
-
- /**
- * Date format to parse timestamps.
- */
- private SimpleDateFormat dateTimeFormat;
-
- private boolean importIntoDatabase;
- private boolean writeRawImportFiles;
-
- private File archivesDirectory;
- private File statsDirectory;
- private boolean keepImportHistory;
-
- /**
- * Initialize database importer by connecting to the database and
- * preparing statements.
- */
- public PerformanceStatsImporter(String connectionURL,
- String rawFilesDirectory, File archivesDirectory,
- File statsDirectory, boolean keepImportHistory) {
-
- if (archivesDirectory == null ||
- statsDirectory == null) {
- throw new IllegalArgumentException();
- }
- this.archivesDirectory = archivesDirectory;
- this.statsDirectory = statsDirectory;
- this.keepImportHistory = keepImportHistory;
-
- /* Initialize logger. */
- this.logger = Logger.getLogger(
- RelayDescriptorDatabaseImporter.class.getName());
-
- if (connectionURL != null) {
- try {
- /* Connect to database. */
- this.conn = DriverManager.getConnection(connectionURL);
-
- /* Turn autocommit off */
- this.conn.setAutoCommit(false);
-
- /* Prepare statements. */
- this.psBs = conn.prepareStatement("SELECT COUNT(*) "
- + "FROM connbidirect WHERE source = ? AND statsend = ?");
- this.psB = conn.prepareStatement("INSERT INTO connbidirect "
- + "(source, statsend, seconds, belownum, readnum, writenum, "
- + "bothnum) VALUES (?, ?, ?, ?, ?, ?, ?)");
- this.importIntoDatabase = true;
- } catch (SQLException e) {
- this.logger.log(Level.WARNING, "Could not connect to database or "
- + "prepare statements.", e);
- }
- }
-
- /* Remember where we want to write raw import files. */
- if (rawFilesDirectory != null) {
- this.rawFilesDirectory = rawFilesDirectory;
- this.writeRawImportFiles = true;
- }
-
- /* Initialize date format, so that we can format timestamps. */
- this.dateTimeFormat = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
- this.dateTimeFormat.setTimeZone(TimeZone.getTimeZone("UTC"));
- }
-
- /**
- * Insert a conn-bi-direct stats string into the database.
- */
- private void addConnBiDirect(String source, long statsEndMillis,
- long seconds, long below, long read, long write, long both) {
- String statsEnd = this.dateTimeFormat.format(statsEndMillis);
- if (this.importIntoDatabase) {
- try {
- Calendar cal = Calendar.getInstance(TimeZone.getTimeZone("UTC"));
- Timestamp statsEndTimestamp = new Timestamp(statsEndMillis);
- this.psBs.setString(1, source);
- this.psBs.setTimestamp(2, statsEndTimestamp, cal);
- ResultSet rs = psBs.executeQuery();
- rs.next();
- if (rs.getInt(1) == 0) {
- this.psB.clearParameters();
- this.psB.setString(1, source);
- this.psB.setTimestamp(2, statsEndTimestamp, cal);
- this.psB.setLong(3, seconds);
- this.psB.setLong(4, below);
- this.psB.setLong(5, read);
- this.psB.setLong(6, write);
- this.psB.setLong(7, both);
- this.psB.executeUpdate();
- rbsCount++;
- if (rbsCount % autoCommitCount == 0) {
- this.conn.commit();
- }
- }
- } catch (SQLException e) {
- this.logger.log(Level.WARNING, "Could not add conn-bi-direct "
- + "stats string. We won't make any further SQL requests in "
- + "this execution.", e);
- this.importIntoDatabase = false;
- }
- }
- if (this.writeRawImportFiles) {
- try {
- if (this.connBiDirectOut == null) {
- new File(rawFilesDirectory).mkdirs();
- this.connBiDirectOut = new BufferedWriter(new FileWriter(
- rawFilesDirectory + "/connbidirect.sql"));
- this.connBiDirectOut.write(" COPY connbidirect (source, "
- + "statsend, seconds, belownum, readnum, writenum, "
- + "bothnum) FROM stdin;\n");
- }
- this.connBiDirectOut.write(source + "\t" + statsEnd + "\t"
- + seconds + "\t" + below + "\t" + read + "\t" + write + "\t"
- + both + "\n");
- } catch (IOException e) {
- this.logger.log(Level.WARNING, "Could not write conn-bi-direct "
- + "stats string to raw database import file. We won't make "
- + "any further attempts to write raw import files in this "
- + "execution.", e);
- this.writeRawImportFiles = false;
- }
- }
- }
-
- void importRelayDescriptors() {
- if (archivesDirectory.exists()) {
- logger.fine("Importing files in directory " + archivesDirectory
- + "/...");
- DescriptorReader reader =
- DescriptorSourceFactory.createDescriptorReader();
- reader.addDirectory(archivesDirectory);
- if (keepImportHistory) {
- reader.setExcludeFiles(new File(statsDirectory,
- "performance-stats-relay-descriptor-history"));
- }
- Iterator<DescriptorFile> descriptorFiles = reader.readDescriptors();
- while (descriptorFiles.hasNext()) {
- DescriptorFile descriptorFile = descriptorFiles.next();
- if (descriptorFile.getDescriptors() != null) {
- for (Descriptor descriptor : descriptorFile.getDescriptors()) {
- if (descriptor instanceof ExtraInfoDescriptor) {
- this.addExtraInfoDescriptor(
- (ExtraInfoDescriptor) descriptor);
- }
- }
- }
- }
- }
-
- logger.info("Finished importing relay descriptors.");
- }
-
- private void addExtraInfoDescriptor(ExtraInfoDescriptor descriptor) {
- if (descriptor.getConnBiDirectStatsEndMillis() >= 0L) {
- this.addConnBiDirect(descriptor.getFingerprint(),
- descriptor.getConnBiDirectStatsEndMillis(),
- descriptor.getConnBiDirectStatsIntervalLength(),
- descriptor.getConnBiDirectBelow(),
- descriptor.getConnBiDirectRead(),
- descriptor.getConnBiDirectWrite(),
- descriptor.getConnBiDirectBoth());
- }
- }
-
- /**
- * Close the relay descriptor database connection.
- */
- void closeConnection() {
-
- /* Log stats about imported descriptors. */
- this.logger.info(String.format("Finished importing relay "
- + "descriptors: %d conn-bi-direct stats lines", rbsCount));
-
- /* Commit any stragglers before closing. */
- if (this.conn != null) {
- try {
- this.conn.commit();
- } catch (SQLException e) {
- this.logger.log(Level.WARNING, "Could not commit final records "
- + "to database", e);
- }
- try {
- this.conn.close();
- } catch (SQLException e) {
- this.logger.log(Level.WARNING, "Could not close database "
- + "connection.", e);
- }
- }
-
- /* Close raw import files. */
- try {
- if (this.connBiDirectOut != null) {
- this.connBiDirectOut.write("\\.\n");
- this.connBiDirectOut.close();
- }
- } catch (IOException e) {
- this.logger.log(Level.WARNING, "Could not close one or more raw "
- + "database import files.", e);
- }
- }
-}
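
PerformanceStatsImporter turns autocommit off and commits after every autoCommitCount inserted records, plus once more for stragglers when closing. The pattern in isolation, sketched against a hypothetical table "samples" with a JDBC URL passed as the first argument (table and schema are assumptions, not from this repository):

    import java.sql.Connection;
    import java.sql.DriverManager;
    import java.sql.PreparedStatement;
    import java.sql.SQLException;

    public class BatchedImport {

      public static void main(String[] args) throws SQLException {
        Connection conn = DriverManager.getConnection(args[0]);
        conn.setAutoCommit(false);
        PreparedStatement ps = conn.prepareStatement(
            "INSERT INTO samples (id, value) VALUES (?, ?)");
        final int autoCommitCount = 500;
        int count = 0;
        for (int id = 0; id < 2000; id++) {
          ps.clearParameters();
          ps.setInt(1, id);
          ps.setLong(2, (long) id * id);
          ps.executeUpdate();
          if (++count % autoCommitCount == 0) {
            conn.commit();      /* flush a full batch */
          }
        }
        conn.commit();          /* commit any stragglers */
        conn.close();
      }
    }
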
diff --git a/src/org/torproject/ernie/cron/RelayDescriptorDatabaseImporter.java b/src/org/torproject/ernie/cron/RelayDescriptorDatabaseImporter.java
index 5d2724a..a51092e 100644
--- a/src/org/torproject/ernie/cron/RelayDescriptorDatabaseImporter.java
+++ b/src/org/torproject/ernie/cron/RelayDescriptorDatabaseImporter.java
@@ -44,6 +44,9 @@ import org.torproject.descriptor.ServerDescriptor;
* Parse directory data.
*/
+/* TODO Split up this class and move its parts to cron.network,
+ * cron.users, and status.relaysearch packages. Requires extensive
+ * changes to the database schema though. */
public final class RelayDescriptorDatabaseImporter {
/**
diff --git a/src/org/torproject/ernie/cron/TorperfProcessor.java b/src/org/torproject/ernie/cron/TorperfProcessor.java
deleted file mode 100644
index 5d43150..0000000
--- a/src/org/torproject/ernie/cron/TorperfProcessor.java
+++ /dev/null
@@ -1,374 +0,0 @@
-/* Copyright 2011, 2012 The Tor Project
- * See LICENSE for licensing information */
-package org.torproject.ernie.cron;
-
-import java.io.BufferedReader;
-import java.io.BufferedWriter;
-import java.io.File;
-import java.io.FileReader;
-import java.io.FileWriter;
-import java.io.IOException;
-import java.sql.Connection;
-import java.sql.DriverManager;
-import java.sql.PreparedStatement;
-import java.sql.ResultSet;
-import java.sql.SQLException;
-import java.sql.Statement;
-import java.text.SimpleDateFormat;
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.Iterator;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-import java.util.SortedMap;
-import java.util.TimeZone;
-import java.util.TreeMap;
-import java.util.logging.Level;
-import java.util.logging.Logger;
-
-import org.torproject.descriptor.Descriptor;
-import org.torproject.descriptor.DescriptorFile;
-import org.torproject.descriptor.DescriptorReader;
-import org.torproject.descriptor.DescriptorSourceFactory;
-import org.torproject.descriptor.TorperfResult;
-
-public class TorperfProcessor {
- public TorperfProcessor(File torperfDirectory, File statsDirectory,
- String connectionURL) {
-
- if (torperfDirectory == null || statsDirectory == null) {
- throw new IllegalArgumentException();
- }
-
- Logger logger = Logger.getLogger(TorperfProcessor.class.getName());
- File rawFile = new File(statsDirectory, "torperf-raw");
- File statsFile = new File(statsDirectory, "torperf-stats");
- SortedMap<String, String> rawObs = new TreeMap<String, String>();
- SortedMap<String, String> stats = new TreeMap<String, String>();
- int addedRawObs = 0;
- SimpleDateFormat formatter =
- new SimpleDateFormat("yyyy-MM-dd,HH:mm:ss");
- formatter.setTimeZone(TimeZone.getTimeZone("UTC"));
- try {
- if (rawFile.exists()) {
- logger.fine("Reading file " + rawFile.getAbsolutePath() + "...");
- BufferedReader br = new BufferedReader(new FileReader(rawFile));
- String line = br.readLine(); // ignore header
- while ((line = br.readLine()) != null) {
- if (line.split(",").length != 4) {
- logger.warning("Corrupt line in " + rawFile.getAbsolutePath()
- + "!");
- break;
- }
- String key = line.substring(0, line.lastIndexOf(","));
- rawObs.put(key, line);
- }
- br.close();
- logger.fine("Finished reading file " + rawFile.getAbsolutePath()
- + ".");
- }
- if (statsFile.exists()) {
- logger.fine("Reading file " + statsFile.getAbsolutePath()
- + "...");
- BufferedReader br = new BufferedReader(new FileReader(statsFile));
- String line = br.readLine(); // ignore header
- while ((line = br.readLine()) != null) {
- String key = line.split(",")[0] + "," + line.split(",")[1];
- stats.put(key, line);
- }
- br.close();
- logger.fine("Finished reading file " + statsFile.getAbsolutePath()
- + ".");
- }
- if (torperfDirectory.exists()) {
- logger.fine("Importing files in " + torperfDirectory + "/...");
- DescriptorReader descriptorReader =
- DescriptorSourceFactory.createDescriptorReader();
- descriptorReader.addDirectory(torperfDirectory);
- descriptorReader.setExcludeFiles(new File(statsDirectory,
- "torperf-history"));
- Iterator<DescriptorFile> descriptorFiles =
- descriptorReader.readDescriptors();
- while (descriptorFiles.hasNext()) {
- DescriptorFile descriptorFile = descriptorFiles.next();
- if (descriptorFile.getException() != null) {
- logger.log(Level.FINE, "Error parsing file.",
- descriptorFile.getException());
- continue;
- }
- for (Descriptor descriptor : descriptorFile.getDescriptors()) {
- if (!(descriptor instanceof TorperfResult)) {
- continue;
- }
- TorperfResult result = (TorperfResult) descriptor;
- String source = result.getSource();
- long fileSize = result.getFileSize();
- if (fileSize == 51200) {
- source += "-50kb";
- } else if (fileSize == 1048576) {
- source += "-1mb";
- } else if (fileSize == 5242880) {
- source += "-5mb";
- } else {
- logger.fine("Unexpected file size '" + fileSize
- + "'. Skipping.");
- continue;
- }
- String dateTime = formatter.format(result.getStartMillis());
- long completeMillis = result.getDataCompleteMillis()
- - result.getStartMillis();
- String key = source + "," + dateTime;
- String value = key;
- if ((result.didTimeout() == null &&
- result.getDataCompleteMillis() < 1) ||
- (result.didTimeout() != null && result.didTimeout())) {
- value += ",-2"; // -2 for timeout
- } else if (result.getReadBytes() < fileSize) {
- value += ",-1"; // -1 for failure
- } else {
- value += "," + completeMillis;
- }
- if (!rawObs.containsKey(key)) {
- rawObs.put(key, value);
- addedRawObs++;
- }
- }
- }
- logger.fine("Finished importing files in " + torperfDirectory
- + "/.");
- }
- if (rawObs.size() > 0) {
- logger.fine("Writing file " + rawFile.getAbsolutePath() + "...");
- rawFile.getParentFile().mkdirs();
- BufferedWriter bw = new BufferedWriter(new FileWriter(rawFile));
- bw.append("source,date,start,completemillis\n");
- String tempSourceDate = null;
- Iterator<Map.Entry<String, String>> it =
- rawObs.entrySet().iterator();
- List<Long> dlTimes = new ArrayList<Long>();
- boolean haveWrittenFinalLine = false;
- SortedMap<String, List<Long>> dlTimesAllSources =
- new TreeMap<String, List<Long>>();
- SortedMap<String, long[]> statusesAllSources =
- new TreeMap<String, long[]>();
- long failures = 0, timeouts = 0, requests = 0;
- while (it.hasNext() || !haveWrittenFinalLine) {
- Map.Entry<String, String> next = it.hasNext() ? it.next() : null;
- if (tempSourceDate != null
- && (next == null || !(next.getValue().split(",")[0] + ","
- + next.getValue().split(",")[1]).equals(tempSourceDate))) {
- if (dlTimes.size() > 4) {
- Collections.sort(dlTimes);
- long q1 = dlTimes.get(dlTimes.size() / 4 - 1);
- long md = dlTimes.get(dlTimes.size() / 2 - 1);
- long q3 = dlTimes.get(dlTimes.size() * 3 / 4 - 1);
- stats.put(tempSourceDate, tempSourceDate + "," + q1 + ","
- + md + "," + q3 + "," + timeouts + "," + failures + ","
- + requests);
- String allSourceDate = "all" + tempSourceDate.substring(
- tempSourceDate.indexOf("-"));
- if (dlTimesAllSources.containsKey(allSourceDate)) {
- dlTimesAllSources.get(allSourceDate).addAll(dlTimes);
- } else {
- dlTimesAllSources.put(allSourceDate, dlTimes);
- }
- if (statusesAllSources.containsKey(allSourceDate)) {
- long[] status = statusesAllSources.get(allSourceDate);
- status[0] += timeouts;
- status[1] += failures;
- status[2] += requests;
- } else {
- long[] status = new long[3];
- status[0] = timeouts;
- status[1] = failures;
- status[2] = requests;
- statusesAllSources.put(allSourceDate, status);
- }
- }
- dlTimes = new ArrayList<Long>();
- failures = timeouts = requests = 0;
- if (next == null) {
- haveWrittenFinalLine = true;
- }
- }
- if (next != null) {
- bw.append(next.getValue() + "\n");
- String[] parts = next.getValue().split(",");
- tempSourceDate = parts[0] + "," + parts[1];
- long completeMillis = Long.parseLong(parts[3]);
- if (completeMillis == -2L) {
- timeouts++;
- } else if (completeMillis == -1L) {
- failures++;
- } else {
- dlTimes.add(Long.parseLong(parts[3]));
- }
- requests++;
- }
- }
- bw.close();
- for (Map.Entry<String, List<Long>> e :
- dlTimesAllSources.entrySet()) {
- String allSourceDate = e.getKey();
- dlTimes = e.getValue();
- Collections.sort(dlTimes);
- long q1 = dlTimes.get(dlTimes.size() / 4 - 1);
- long md = dlTimes.get(dlTimes.size() / 2 - 1);
- long q3 = dlTimes.get(dlTimes.size() * 3 / 4 - 1);
- long[] status = statusesAllSources.get(allSourceDate);
- timeouts = status[0];
- failures = status[1];
- requests = status[2];
- stats.put(allSourceDate, allSourceDate + "," + q1 + "," + md
- + "," + q3 + "," + timeouts + "," + failures + ","
- + requests);
- }
- logger.fine("Finished writing file " + rawFile.getAbsolutePath()
- + ".");
- }
- if (stats.size() > 0) {
- logger.fine("Writing file " + statsFile.getAbsolutePath()
- + "...");
- statsFile.getParentFile().mkdirs();
- BufferedWriter bw = new BufferedWriter(new FileWriter(statsFile));
- bw.append("source,date,q1,md,q3,timeouts,failures,requests\n");
- for (String s : stats.values()) {
- bw.append(s + "\n");
- }
- bw.close();
- logger.fine("Finished writing file " + statsFile.getAbsolutePath()
- + ".");
- }
- } catch (IOException e) {
- logger.log(Level.WARNING, "Failed writing "
- + rawFile.getAbsolutePath() + " or "
- + statsFile.getAbsolutePath() + "!", e);
- }
-
- /* Write stats. */
- StringBuilder dumpStats = new StringBuilder("Finished writing "
- + "statistics on torperf results.\nAdded " + addedRawObs
- + " new observations in this execution.\n"
- + "Last known obserations by source and file size are:");
- String lastSource = null;
- String lastLine = null;
- for (String s : rawObs.keySet()) {
- String[] parts = s.split(",");
- if (lastSource == null) {
- lastSource = parts[0];
- } else if (!parts[0].equals(lastSource)) {
- String lastKnownObservation = lastLine.split(",")[1] + " "
- + lastLine.split(",")[2];
- dumpStats.append("\n" + lastSource + " " + lastKnownObservation);
- lastSource = parts[0];
- }
- lastLine = s;
- }
- if (lastSource != null) {
- String lastKnownObservation = lastLine.split(",")[1] + " "
- + lastLine.split(",")[2];
- dumpStats.append("\n" + lastSource + " " + lastKnownObservation);
- }
- logger.info(dumpStats.toString());
-
- /* Write results to database. */
- if (connectionURL != null) {
- try {
- Map<String, String> insertRows = new HashMap<String, String>();
- insertRows.putAll(stats);
- Set<String> updateRows = new HashSet<String>();
- Connection conn = DriverManager.getConnection(connectionURL);
- conn.setAutoCommit(false);
- Statement statement = conn.createStatement();
- ResultSet rs = statement.executeQuery(
- "SELECT date, source, q1, md, q3, timeouts, failures, "
- + "requests FROM torperf_stats");
- while (rs.next()) {
- String date = rs.getDate(1).toString();
- String source = rs.getString(2);
- String key = source + "," + date;
- if (insertRows.containsKey(key)) {
- String insertRow = insertRows.remove(key);
- String[] newStats = insertRow.split(",");
- long newQ1 = Long.parseLong(newStats[2]);
- long newMd = Long.parseLong(newStats[3]);
- long newQ3 = Long.parseLong(newStats[4]);
- long newTimeouts = Long.parseLong(newStats[5]);
- long newFailures = Long.parseLong(newStats[6]);
- long newRequests = Long.parseLong(newStats[7]);
- long oldQ1 = rs.getLong(3);
- long oldMd = rs.getLong(4);
- long oldQ3 = rs.getLong(5);
- long oldTimeouts = rs.getLong(6);
- long oldFailures = rs.getLong(7);
- long oldRequests = rs.getLong(8);
- if (newQ1 != oldQ1 || newMd != oldMd || newQ3 != oldQ3 ||
- newTimeouts != oldTimeouts ||
- newFailures != oldFailures ||
- newRequests != oldRequests) {
- updateRows.add(insertRow);
- }
- }
- }
- PreparedStatement psU = conn.prepareStatement(
- "UPDATE torperf_stats SET q1 = ?, md = ?, q3 = ?, "
- + "timeouts = ?, failures = ?, requests = ? "
- + "WHERE date = ? AND source = ?");
- for (String row : updateRows) {
- String[] newStats = row.split(",");
- String source = newStats[0];
- java.sql.Date date = java.sql.Date.valueOf(newStats[1]);
- long q1 = Long.parseLong(newStats[2]);
- long md = Long.parseLong(newStats[3]);
- long q3 = Long.parseLong(newStats[4]);
- long timeouts = Long.parseLong(newStats[5]);
- long failures = Long.parseLong(newStats[6]);
- long requests = Long.parseLong(newStats[7]);
- psU.clearParameters();
- psU.setLong(1, q1);
- psU.setLong(2, md);
- psU.setLong(3, q3);
- psU.setLong(4, timeouts);
- psU.setLong(5, failures);
- psU.setLong(6, requests);
- psU.setDate(7, date);
- psU.setString(8, source);
- psU.executeUpdate();
- }
- PreparedStatement psI = conn.prepareStatement(
- "INSERT INTO torperf_stats (q1, md, q3, timeouts, failures, "
- + "requests, date, source) VALUES (?, ?, ?, ?, ?, ?, ?, ?)");
- for (String row : insertRows.values()) {
- String[] newStats = row.split(",");
- String source = newStats[0];
- java.sql.Date date = java.sql.Date.valueOf(newStats[1]);
- long q1 = Long.parseLong(newStats[2]);
- long md = Long.parseLong(newStats[3]);
- long q3 = Long.parseLong(newStats[4]);
- long timeouts = Long.parseLong(newStats[5]);
- long failures = Long.parseLong(newStats[6]);
- long requests = Long.parseLong(newStats[7]);
- psI.clearParameters();
- psI.setLong(1, q1);
- psI.setLong(2, md);
- psI.setLong(3, q3);
- psI.setLong(4, timeouts);
- psI.setLong(5, failures);
- psI.setLong(6, requests);
- psI.setDate(7, date);
- psI.setString(8, source);
- psI.executeUpdate();
- }
- conn.commit();
- conn.close();
- } catch (SQLException e) {
- logger.log(Level.WARNING, "Failed to add torperf stats to "
- + "database.", e);
- }
- }
- }
-}
-
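TorperfProcessor summarizes each source and day by the lower quartile, median, and upper quartile of the sorted download times, and only when more than four observations exist. The same selection on invented sample values:

    import java.util.ArrayList;
    import java.util.Collections;
    import java.util.List;

    public class TorperfQuartiles {

      public static void main(String[] args) {
        List<Long> dlTimes = new ArrayList<Long>();
        Collections.addAll(dlTimes, 1200L, 800L, 950L, 2100L, 700L,
            1500L, 1100L, 900L);
        /* Requires more than four observations, as above. */
        if (dlTimes.size() > 4) {
          Collections.sort(dlTimes);
          long q1 = dlTimes.get(dlTimes.size() / 4 - 1);
          long md = dlTimes.get(dlTimes.size() / 2 - 1);
          long q3 = dlTimes.get(dlTimes.size() * 3 / 4 - 1);
          /* Prints "800,950,1200" for the sample above. */
          System.out.println(q1 + "," + md + "," + q3);
        }
      }
    }
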
diff --git a/src/org/torproject/ernie/cron/network/ConsensusStatsFileHandler.java b/src/org/torproject/ernie/cron/network/ConsensusStatsFileHandler.java
new file mode 100644
index 0000000..d5cae37
--- /dev/null
+++ b/src/org/torproject/ernie/cron/network/ConsensusStatsFileHandler.java
@@ -0,0 +1,380 @@
+/* Copyright 2011, 2012 The Tor Project
+ * See LICENSE for licensing information */
+package org.torproject.ernie.cron.network;
+
+import java.io.BufferedReader;
+import java.io.BufferedWriter;
+import java.io.File;
+import java.io.FileReader;
+import java.io.FileWriter;
+import java.io.IOException;
+import java.sql.Connection;
+import java.sql.DriverManager;
+import java.sql.PreparedStatement;
+import java.sql.ResultSet;
+import java.sql.SQLException;
+import java.sql.Statement;
+import java.text.ParseException;
+import java.text.SimpleDateFormat;
+import java.util.HashMap;
+import java.util.Iterator;
+import java.util.Map;
+import java.util.SortedMap;
+import java.util.TimeZone;
+import java.util.TreeMap;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+
+import org.torproject.descriptor.BridgeNetworkStatus;
+import org.torproject.descriptor.Descriptor;
+import org.torproject.descriptor.DescriptorFile;
+import org.torproject.descriptor.DescriptorReader;
+import org.torproject.descriptor.DescriptorSourceFactory;
+import org.torproject.descriptor.NetworkStatusEntry;
+
+/**
+ * Generates statistics on the average number of running bridges per
+ * day. Reads sanitized bridge network statuses, stores raw
+ * observations in the intermediate results file
+ * <code>stats/bridge-consensus-stats-raw</code>, and writes average
+ * bridge numbers per day to the database for all days for which at
+ * least half of the expected statuses are known.
+ */
+public class ConsensusStatsFileHandler {
+
+ /**
+ * Intermediate results file holding the number of running bridges per
+ * bridge status.
+ */
+ private File bridgeConsensusStatsRawFile;
+
+ /**
+ * Number of running bridges in a given bridge status. Map keys are
+ * bridge status times formatted as "yyyy-MM-dd HH:mm:ss", map values
+ * are lines as read from <code>stats/bridge-consensus-stats-raw</code>.
+ */
+ private SortedMap<String, String> bridgesRaw;
+
+ /**
+ * Average number of running bridges per day. Map keys are dates
+ * formatted as "yyyy-MM-dd", map values are the last column as written
+ * to <code>stats/consensus-stats</code>.
+ */
+ private SortedMap<String, String> bridgesPerDay;
+
+ /**
+ * Logger for this class.
+ */
+ private Logger logger;
+
+ private int bridgeResultsAdded = 0;
+
+ /* Database connection string. */
+ private String connectionURL = null;
+
+ private SimpleDateFormat dateTimeFormat;
+
+ private File bridgesDir;
+
+ private File statsDirectory;
+
+ private boolean keepImportHistory;
+
+  /**
+   * Initializes this class, including reading in the intermediate
+   * results file <code>stats/bridge-consensus-stats-raw</code>.
+   */
+ public ConsensusStatsFileHandler(String connectionURL,
+ File bridgesDir, File statsDirectory,
+ boolean keepImportHistory) {
+
+ if (bridgesDir == null || statsDirectory == null) {
+ throw new IllegalArgumentException();
+ }
+ this.bridgesDir = bridgesDir;
+ this.statsDirectory = statsDirectory;
+ this.keepImportHistory = keepImportHistory;
+
+ /* Initialize local data structures to hold intermediate and final
+ * results. */
+ this.bridgesPerDay = new TreeMap<String, String>();
+ this.bridgesRaw = new TreeMap<String, String>();
+
+    /* Initialize file name for the intermediate results file. */
+ this.bridgeConsensusStatsRawFile = new File(
+ "stats/bridge-consensus-stats-raw");
+
+ /* Initialize database connection string. */
+ this.connectionURL = connectionURL;
+
+ this.dateTimeFormat = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
+ this.dateTimeFormat.setTimeZone(TimeZone.getTimeZone("UTC"));
+
+ /* Initialize logger. */
+ this.logger = Logger.getLogger(
+ ConsensusStatsFileHandler.class.getName());
+
+ /* Read in number of running bridges per bridge status. */
+ if (this.bridgeConsensusStatsRawFile.exists()) {
+ try {
+ this.logger.fine("Reading file "
+ + this.bridgeConsensusStatsRawFile.getAbsolutePath() + "...");
+ BufferedReader br = new BufferedReader(new FileReader(
+ this.bridgeConsensusStatsRawFile));
+ String line = null;
+ while ((line = br.readLine()) != null) {
+ if (line.startsWith("date")) {
+ /* Skip headers. */
+ continue;
+ }
+ String[] parts = line.split(",");
+ String dateTime = parts[0];
+ if (parts.length == 2) {
+ this.bridgesRaw.put(dateTime, line + ",0");
+ } else if (parts.length == 3) {
+ this.bridgesRaw.put(dateTime, line);
+ } else {
+ this.logger.warning("Corrupt line '" + line + "' in file "
+ + this.bridgeConsensusStatsRawFile.getAbsolutePath()
+ + "! Aborting to read this file!");
+ break;
+ }
+ }
+ br.close();
+ this.logger.fine("Finished reading file "
+ + this.bridgeConsensusStatsRawFile.getAbsolutePath() + ".");
+ } catch (IOException e) {
+ this.logger.log(Level.WARNING, "Failed to read file "
+ + this.bridgeConsensusStatsRawFile.getAbsolutePath() + "!",
+ e);
+ }
+ }
+ }
+
+ /**
+ * Adds the intermediate results of the number of running bridges in a
+ * given bridge status to the existing observations.
+ */
+ public void addBridgeConsensusResults(long publishedMillis, int running,
+ int runningEc2Bridges) {
+ String published = dateTimeFormat.format(publishedMillis);
+ String line = published + "," + running + "," + runningEc2Bridges;
+ if (!this.bridgesRaw.containsKey(published)) {
+ this.logger.finer("Adding new bridge numbers: " + line);
+ this.bridgesRaw.put(published, line);
+ this.bridgeResultsAdded++;
+ } else if (!line.equals(this.bridgesRaw.get(published))) {
+ this.logger.warning("The numbers of running bridges we were just "
+ + "given (" + line + ") are different from what we learned "
+ + "before (" + this.bridgesRaw.get(published) + ")! "
+ + "Overwriting!");
+ this.bridgesRaw.put(published, line);
+ }
+ }
+
+ public void importSanitizedBridges() {
+ if (bridgesDir.exists()) {
+ logger.fine("Importing files in directory " + bridgesDir + "/...");
+ DescriptorReader reader =
+ DescriptorSourceFactory.createDescriptorReader();
+ reader.addDirectory(bridgesDir);
+ if (keepImportHistory) {
+ reader.setExcludeFiles(new File(statsDirectory,
+ "consensus-stats-bridge-descriptor-history"));
+ }
+ Iterator<DescriptorFile> descriptorFiles = reader.readDescriptors();
+ while (descriptorFiles.hasNext()) {
+ DescriptorFile descriptorFile = descriptorFiles.next();
+ if (descriptorFile.getDescriptors() != null) {
+ for (Descriptor descriptor : descriptorFile.getDescriptors()) {
+ if (descriptor instanceof BridgeNetworkStatus) {
+ this.addBridgeNetworkStatus(
+ (BridgeNetworkStatus) descriptor);
+ }
+ }
+ }
+ }
+ logger.info("Finished importing bridge descriptors.");
+ }
+ }
+
+ private void addBridgeNetworkStatus(BridgeNetworkStatus status) {
+ int runningBridges = 0, runningEc2Bridges = 0;
+ for (NetworkStatusEntry statusEntry :
+ status.getStatusEntries().values()) {
+ if (statusEntry.getFlags().contains("Running")) {
+ runningBridges++;
+ if (statusEntry.getNickname().startsWith("ec2bridge")) {
+ runningEc2Bridges++;
+ }
+ }
+ }
+ this.addBridgeConsensusResults(status.getPublishedMillis(),
+ runningBridges, runningEc2Bridges);
+ }
+
+  /**
+   * Aggregates the raw observations on bridge numbers, writes raw
+   * observations to disk, and adds average bridge numbers per day to
+   * the database.
+   */
+ public void writeFiles() {
+
+ /* Go through raw observations of numbers of running bridges in bridge
+ * statuses, calculate averages per day, and add these averages to
+ * final results. */
+ if (!this.bridgesRaw.isEmpty()) {
+ String tempDate = null;
+ int brunning = 0, brunningEc2 = 0, statuses = 0;
+ Iterator<String> it = this.bridgesRaw.values().iterator();
+ boolean haveWrittenFinalLine = false;
+ while (it.hasNext() || !haveWrittenFinalLine) {
+ String next = it.hasNext() ? it.next() : null;
+ /* Finished reading a day or even all lines? */
+ if (tempDate != null && (next == null
+ || !next.substring(0, 10).equals(tempDate))) {
+ /* Only write results if we have seen at least half of all
+ * statuses. */
+ if (statuses >= 24) {
+ String line = "," + (brunning / statuses) + ","
+ + (brunningEc2 / statuses);
+ /* Are our results new? */
+ if (!this.bridgesPerDay.containsKey(tempDate)) {
+ this.logger.finer("Adding new average bridge numbers: "
+ + tempDate + line);
+ this.bridgesPerDay.put(tempDate, line);
+ } else if (!line.equals(this.bridgesPerDay.get(tempDate))) {
+ this.logger.finer("Replacing existing average bridge "
+ + "numbers (" + this.bridgesPerDay.get(tempDate)
+ + " with new numbers: " + line);
+ this.bridgesPerDay.put(tempDate, line);
+ }
+ }
+ brunning = brunningEc2 = statuses = 0;
+ haveWrittenFinalLine = (next == null);
+ }
+ /* Sum up number of running bridges. */
+ if (next != null) {
+ tempDate = next.substring(0, 10);
+ statuses++;
+ String[] parts = next.split(",");
+ brunning += Integer.parseInt(parts[1]);
+ brunningEc2 += Integer.parseInt(parts[2]);
+ }
+ }
+ }
+
+ /* Write raw numbers of running bridges to disk. */
+ try {
+ this.logger.fine("Writing file "
+ + this.bridgeConsensusStatsRawFile.getAbsolutePath() + "...");
+ this.bridgeConsensusStatsRawFile.getParentFile().mkdirs();
+ BufferedWriter bw = new BufferedWriter(
+ new FileWriter(this.bridgeConsensusStatsRawFile));
+ bw.append("datetime,brunning,brunningec2\n");
+ for (String line : this.bridgesRaw.values()) {
+ bw.append(line + "\n");
+ }
+ bw.close();
+ this.logger.fine("Finished writing file "
+ + this.bridgeConsensusStatsRawFile.getAbsolutePath() + ".");
+ } catch (IOException e) {
+ this.logger.log(Level.WARNING, "Failed to write file "
+ + this.bridgeConsensusStatsRawFile.getAbsolutePath() + "!",
+ e);
+ }
+
+ /* Add average number of bridges per day to the database. */
+ if (connectionURL != null) {
+ try {
+ Map<String, String> insertRows = new HashMap<String, String>(),
+ updateRows = new HashMap<String, String>();
+ insertRows.putAll(this.bridgesPerDay);
+ Connection conn = DriverManager.getConnection(connectionURL);
+ conn.setAutoCommit(false);
+ Statement statement = conn.createStatement();
+ ResultSet rs = statement.executeQuery(
+ "SELECT date, avg_running, avg_running_ec2 "
+ + "FROM bridge_network_size");
+ while (rs.next()) {
+ String date = rs.getDate(1).toString();
+ if (insertRows.containsKey(date)) {
+ String insertRow = insertRows.remove(date);
+ String[] parts = insertRow.substring(1).split(",");
+ long newAvgRunning = Long.parseLong(parts[0]);
+ long newAvgRunningEc2 = Long.parseLong(parts[1]);
+ long oldAvgRunning = rs.getLong(2);
+ long oldAvgRunningEc2 = rs.getLong(3);
+ if (newAvgRunning != oldAvgRunning ||
+ newAvgRunningEc2 != oldAvgRunningEc2) {
+ updateRows.put(date, insertRow);
+ }
+ }
+ }
+ rs.close();
+ PreparedStatement psU = conn.prepareStatement(
+ "UPDATE bridge_network_size SET avg_running = ?, "
+ + "avg_running_ec2 = ? WHERE date = ?");
+ for (Map.Entry<String, String> e : updateRows.entrySet()) {
+ java.sql.Date date = java.sql.Date.valueOf(e.getKey());
+ String[] parts = e.getValue().substring(1).split(",");
+ long avgRunning = Long.parseLong(parts[0]);
+ long avgRunningEc2 = Long.parseLong(parts[1]);
+ psU.clearParameters();
+ psU.setLong(1, avgRunning);
+ psU.setLong(2, avgRunningEc2);
+ psU.setDate(3, date);
+ psU.executeUpdate();
+ }
+ PreparedStatement psI = conn.prepareStatement(
+ "INSERT INTO bridge_network_size (avg_running, "
+ + "avg_running_ec2, date) VALUES (?, ?, ?)");
+ for (Map.Entry<String, String> e : insertRows.entrySet()) {
+ java.sql.Date date = java.sql.Date.valueOf(e.getKey());
+ String[] parts = e.getValue().substring(1).split(",");
+ long avgRunning = Long.parseLong(parts[0]);
+ long avgRunningEc2 = Long.parseLong(parts[1]);
+ psI.clearParameters();
+ psI.setLong(1, avgRunning);
+ psI.setLong(2, avgRunningEc2);
+ psI.setDate(3, date);
+ psI.executeUpdate();
+ }
+ conn.commit();
+ conn.close();
+ } catch (SQLException e) {
+ logger.log(Level.WARNING, "Failed to add average bridge numbers "
+ + "to database.", e);
+ }
+ }
+
+ /* Write stats. */
+ StringBuilder dumpStats = new StringBuilder("Finished writing "
+ + "statistics on bridge network statuses to disk.\nAdded "
+ + this.bridgeResultsAdded + " bridge network status(es) in this "
+ + "execution.");
+ long now = System.currentTimeMillis();
+ if (this.bridgesRaw.isEmpty()) {
+ dumpStats.append("\nNo bridge status known yet.");
+ } else {
+ dumpStats.append("\nLast known bridge status was published "
+ + this.bridgesRaw.lastKey() + ".");
+ try {
+ if (now - 6L * 60L * 60L * 1000L > dateTimeFormat.parse(
+ this.bridgesRaw.lastKey()).getTime()) {
+ logger.warning("Last known bridge status is more than 6 hours "
+ + "old: " + this.bridgesRaw.lastKey());
+ }
+ } catch (ParseException e) {
+ /* Can't parse the timestamp? Whatever. */
+ }
+ }
+ logger.info(dumpStats.toString());
+ }
+}
+
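writeFiles() above averages the raw bridge counts per UTC day and keeps a day only if at least 24 statuses were seen, that is, half of the 48 statuses expected per day. The same rule applied to synthetic counts, restructured around maps for brevity (input values are invented):

    import java.util.Map;
    import java.util.SortedMap;
    import java.util.TreeMap;

    public class BridgeAverages {

      public static void main(String[] args) {
        /* Keys are "yyyy-MM-dd HH:mm:ss" status times, values are
         * numbers of running bridges in that status. */
        SortedMap<String, Integer> bridgesRaw =
            new TreeMap<String, Integer>();
        for (int hour = 0; hour < 30; hour++) {
          bridgesRaw.put(String.format("2012-12-%02d %02d:00:00",
              14 + hour / 24, hour % 24), 1000 + hour);
        }
        SortedMap<String, int[]> sums = new TreeMap<String, int[]>();
        for (Map.Entry<String, Integer> e : bridgesRaw.entrySet()) {
          String date = e.getKey().substring(0, 10);
          if (!sums.containsKey(date)) {
            sums.put(date, new int[2]);
          }
          sums.get(date)[0] += e.getValue();  /* running bridges */
          sums.get(date)[1]++;                /* statuses seen */
        }
        SortedMap<String, Integer> bridgesPerDay =
            new TreeMap<String, Integer>();
        for (Map.Entry<String, int[]> e : sums.entrySet()) {
          /* Keep days with at least half of all 48 statuses. */
          if (e.getValue()[1] >= 24) {
            bridgesPerDay.put(e.getKey(),
                e.getValue()[0] / e.getValue()[1]);
          }
        }
        /* Prints "{2012-12-14=1011}"; 2012-12-15 has only six
         * statuses and is skipped. */
        System.out.println(bridgesPerDay);
      }
    }
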
diff --git a/src/org/torproject/ernie/cron/network/GeoipDatabaseImporter.java b/src/org/torproject/ernie/cron/network/GeoipDatabaseImporter.java
new file mode 100644
index 0000000..75e29ac
--- /dev/null
+++ b/src/org/torproject/ernie/cron/network/GeoipDatabaseImporter.java
@@ -0,0 +1,105 @@
+/* Copyright 2011, 2012 The Tor Project
+ * See LICENSE for licensing information */
+package org.torproject.ernie.cron.network;
+
+import java.io.BufferedReader;
+import java.io.File;
+import java.io.FileReader;
+import java.io.IOException;
+import java.sql.Connection;
+import java.sql.DriverManager;
+import java.sql.PreparedStatement;
+import java.sql.SQLException;
+import java.sql.Statement;
+import java.sql.Types;
+import java.util.HashMap;
+import java.util.Map;
+
+import org.torproject.ernie.cron.Configuration;
+
+/**
+ * Import a Maxmind GeoLite City database to resolve IP addresses to
+ * country codes, latitudes, and longitudes.
+ */
+public class GeoipDatabaseImporter {
+ public static void main(String[] args) throws IOException,
+ SQLException {
+
+ /* Check if the GeoIP database files are in place. */
+ File locationsFile = new File("GeoLiteCity-Location.csv"),
+ blocksFile = new File("GeoLiteCity-Blocks.csv");
+ if (!locationsFile.exists() || !blocksFile.exists()) {
+ System.out.println("Could not find GeoLiteCity-Location.csv and/or "
+ + "GeoLiteCity-Blocks.csv in the working directory! Exiting!");
+ System.exit(1);
+ }
+
+ /* Initialize configuration to learn JDBC string. */
+ Configuration config = new Configuration();
+ String jdbcString = config.getRelayDescriptorDatabaseJDBC();
+
+ /* Connect to database. */
+ Connection c = DriverManager.getConnection(jdbcString);
+
+ /* Start by reading location information to memory. */
+ BufferedReader br = new BufferedReader(new FileReader(locationsFile));
+ String line;
+ Map<Integer, String> locations = new HashMap<Integer, String>();
+ while ((line = br.readLine()) != null) {
+ if (line.startsWith("Copyright") || line.startsWith("locId")) {
+ continue;
+ }
+ String[] parts = line.split(",");
+ int locId = Integer.parseInt(parts[0]);
+ String country = parts[1].replaceAll("\"", "");
+ String latitude = parts[5];
+ String longitude = parts[6];
+ locations.put(locId, country + "," + latitude + "," + longitude);
+ }
+ br.close();
+
+ /* Parse block information and add it to the database together with
+ * the location information. */
+ PreparedStatement ps = c.prepareStatement("INSERT INTO geoipdb "
+ + "(ipstart, ipend, country, latitude, longitude) VALUES "
+ + "(?, ?, ?, ?, ?)");
+ Statement s = c.createStatement();
+ s.execute("DELETE FROM geoipdb");
+ /* TODO The import takes 30+ minutes. Perform the import in a single
+ * transaction, or requests will return strange results in these 30+
+ * minutes. */
+ br = new BufferedReader(new FileReader(blocksFile));
+ while ((line = br.readLine()) != null) {
+ if (line.startsWith("Copyright") ||
+ line.startsWith("startIpNum")) {
+ continue;
+ }
+ String[] parts = line.replaceAll("\"", "").split(",");
+ long startIpNum = Long.parseLong(parts[0]);
+ String startIp = "" + startIpNum / 256 / 256 / 256 + "."
+ + startIpNum / 256 / 256 % 256 + "." + startIpNum / 256 % 256
+ + "." + startIpNum % 256;
+ long endIpNum = Long.parseLong(parts[1]);
+ String endIp = "" + endIpNum / 256 / 256 / 256 + "."
+ + endIpNum / 256 / 256 % 256 + "." + endIpNum / 256 % 256 + "."
+ + endIpNum % 256;
+ int locId = Integer.parseInt(parts[2]);
+ if (!locations.containsKey(locId)) {
+ System.out.println("Cannot find locId=" + locId
+ + " in locations file!");
+ continue;
+ }
+ String[] locationParts = locations.get(locId).split(",");
+ String country = locationParts[0];
+ double latitude = Double.parseDouble(locationParts[1]);
+ double longitude = Double.parseDouble(locationParts[2]);
+ ps.setObject(1, startIp, Types.OTHER);
+ ps.setObject(2, endIp, Types.OTHER);
+ ps.setString(3, country);
+ ps.setDouble(4, latitude);
+ ps.setDouble(5, longitude);
+ ps.execute();
+ }
+ }
+}
+
diff --git a/src/org/torproject/ernie/cron/performance/PerformanceStatsImporter.java b/src/org/torproject/ernie/cron/performance/PerformanceStatsImporter.java
new file mode 100644
index 0000000..815b37f
--- /dev/null
+++ b/src/org/torproject/ernie/cron/performance/PerformanceStatsImporter.java
@@ -0,0 +1,271 @@
+/* Copyright 2012 The Tor Project
+ * See LICENSE for licensing information */
+package org.torproject.ernie.cron.performance;
+
+import java.io.BufferedWriter;
+import java.io.File;
+import java.io.FileWriter;
+import java.io.IOException;
+import java.sql.Connection;
+import java.sql.DriverManager;
+import java.sql.PreparedStatement;
+import java.sql.ResultSet;
+import java.sql.SQLException;
+import java.sql.Timestamp;
+import java.text.SimpleDateFormat;
+import java.util.Calendar;
+import java.util.Iterator;
+import java.util.TimeZone;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+
+import org.torproject.descriptor.Descriptor;
+import org.torproject.descriptor.DescriptorFile;
+import org.torproject.descriptor.DescriptorReader;
+import org.torproject.descriptor.DescriptorSourceFactory;
+import org.torproject.descriptor.ExtraInfoDescriptor;
+
+public class PerformanceStatsImporter {
+
+ /**
+ * How many records to commit with each database transaction.
+ */
+ private final long autoCommitCount = 500;
+
+  /**
+   * Number of records inserted so far; used to commit a transaction
+   * after every <code>autoCommitCount</code> records.
+   */
+ private int rbsCount = 0;
+
+ /**
+ * Relay descriptor database connection.
+ */
+ private Connection conn;
+
+ /**
+ * Prepared statement to check whether a given conn-bi-direct stats
+ * string has been imported into the database before.
+ */
+ private PreparedStatement psBs;
+
+ /**
+ * Prepared statement to insert a conn-bi-direct stats string into the
+ * database.
+ */
+ private PreparedStatement psB;
+
+ /**
+ * Logger for this class.
+ */
+ private Logger logger;
+
+ /**
+ * Directory for writing raw import files.
+ */
+ private String rawFilesDirectory;
+
+ /**
+ * Raw import file containing conn-bi-direct stats strings.
+ */
+ private BufferedWriter connBiDirectOut;
+
+ /**
+ * Date format to parse timestamps.
+ */
+ private SimpleDateFormat dateTimeFormat;
+
+ private boolean importIntoDatabase;
+ private boolean writeRawImportFiles;
+
+ private File archivesDirectory;
+ private File statsDirectory;
+ private boolean keepImportHistory;
+
+ /**
+ * Initialize database importer by connecting to the database and
+ * preparing statements.
+ */
+ public PerformanceStatsImporter(String connectionURL,
+ String rawFilesDirectory, File archivesDirectory,
+ File statsDirectory, boolean keepImportHistory) {
+
+ if (archivesDirectory == null ||
+ statsDirectory == null) {
+ throw new IllegalArgumentException();
+ }
+ this.archivesDirectory = archivesDirectory;
+ this.statsDirectory = statsDirectory;
+ this.keepImportHistory = keepImportHistory;
+
+ /* Initialize logger. */
+ this.logger = Logger.getLogger(
+ PerformanceStatsImporter.class.getName());
+
+ if (connectionURL != null) {
+ try {
+ /* Connect to database. */
+ this.conn = DriverManager.getConnection(connectionURL);
+
+ /* Turn autocommit off */
+ this.conn.setAutoCommit(false);
+
+ /* Prepare statements. */
+ this.psBs = conn.prepareStatement("SELECT COUNT(*) "
+ + "FROM connbidirect WHERE source = ? AND statsend = ?");
+ this.psB = conn.prepareStatement("INSERT INTO connbidirect "
+ + "(source, statsend, seconds, belownum, readnum, writenum, "
+ + "bothnum) VALUES (?, ?, ?, ?, ?, ?, ?)");
+ this.importIntoDatabase = true;
+ } catch (SQLException e) {
+ this.logger.log(Level.WARNING, "Could not connect to database or "
+ + "prepare statements.", e);
+ }
+ }
+
+ /* Remember where we want to write raw import files. */
+ if (rawFilesDirectory != null) {
+ this.rawFilesDirectory = rawFilesDirectory;
+ this.writeRawImportFiles = true;
+ }
+
+ /* Initialize date format, so that we can format timestamps. */
+ this.dateTimeFormat = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
+ this.dateTimeFormat.setTimeZone(TimeZone.getTimeZone("UTC"));
+ }
+
+ /**
+ * Insert a conn-bi-direct stats string into the database.
+ */
+ private void addConnBiDirect(String source, long statsEndMillis,
+ long seconds, long below, long read, long write, long both) {
+ String statsEnd = this.dateTimeFormat.format(statsEndMillis);
+ if (this.importIntoDatabase) {
+ try {
+ Calendar cal = Calendar.getInstance(TimeZone.getTimeZone("UTC"));
+ Timestamp statsEndTimestamp = new Timestamp(statsEndMillis);
+ this.psBs.setString(1, source);
+ this.psBs.setTimestamp(2, statsEndTimestamp, cal);
+ ResultSet rs = psBs.executeQuery();
+ rs.next();
+ if (rs.getInt(1) == 0) {
+ this.psB.clearParameters();
+ this.psB.setString(1, source);
+ this.psB.setTimestamp(2, statsEndTimestamp, cal);
+ this.psB.setLong(3, seconds);
+ this.psB.setLong(4, below);
+ this.psB.setLong(5, read);
+ this.psB.setLong(6, write);
+ this.psB.setLong(7, both);
+ this.psB.executeUpdate();
+ rbsCount++;
+ if (rbsCount % autoCommitCount == 0) {
+ this.conn.commit();
+ }
+        }
+        rs.close();
+ } catch (SQLException e) {
+ this.logger.log(Level.WARNING, "Could not add conn-bi-direct "
+ + "stats string. We won't make any further SQL requests in "
+ + "this execution.", e);
+ this.importIntoDatabase = false;
+ }
+ }
+ if (this.writeRawImportFiles) {
+ try {
+ if (this.connBiDirectOut == null) {
+ new File(rawFilesDirectory).mkdirs();
+ this.connBiDirectOut = new BufferedWriter(new FileWriter(
+ rawFilesDirectory + "/connbidirect.sql"));
+ this.connBiDirectOut.write(" COPY connbidirect (source, "
+ + "statsend, seconds, belownum, readnum, writenum, "
+ + "bothnum) FROM stdin;\n");
+ }
+ this.connBiDirectOut.write(source + "\t" + statsEnd + "\t"
+ + seconds + "\t" + below + "\t" + read + "\t" + write + "\t"
+ + both + "\n");
+ } catch (IOException e) {
+ this.logger.log(Level.WARNING, "Could not write conn-bi-direct "
+ + "stats string to raw database import file. We won't make "
+ + "any further attempts to write raw import files in this "
+ + "execution.", e);
+ this.writeRawImportFiles = false;
+ }
+ }
+ }
+
+ public void importRelayDescriptors() {
+ if (archivesDirectory.exists()) {
+ logger.fine("Importing files in directory " + archivesDirectory
+ + "/...");
+ DescriptorReader reader =
+ DescriptorSourceFactory.createDescriptorReader();
+ reader.addDirectory(archivesDirectory);
+ if (keepImportHistory) {
+ reader.setExcludeFiles(new File(statsDirectory,
+ "performance-stats-relay-descriptor-history"));
+ }
+ Iterator<DescriptorFile> descriptorFiles = reader.readDescriptors();
+ while (descriptorFiles.hasNext()) {
+ DescriptorFile descriptorFile = descriptorFiles.next();
+ if (descriptorFile.getDescriptors() != null) {
+ for (Descriptor descriptor : descriptorFile.getDescriptors()) {
+ if (descriptor instanceof ExtraInfoDescriptor) {
+ this.addExtraInfoDescriptor(
+ (ExtraInfoDescriptor) descriptor);
+ }
+ }
+ }
+ }
+ }
+
+ logger.info("Finished importing relay descriptors.");
+ }
+
+ private void addExtraInfoDescriptor(ExtraInfoDescriptor descriptor) {
+ if (descriptor.getConnBiDirectStatsEndMillis() >= 0L) {
+ this.addConnBiDirect(descriptor.getFingerprint(),
+ descriptor.getConnBiDirectStatsEndMillis(),
+ descriptor.getConnBiDirectStatsIntervalLength(),
+ descriptor.getConnBiDirectBelow(),
+ descriptor.getConnBiDirectRead(),
+ descriptor.getConnBiDirectWrite(),
+ descriptor.getConnBiDirectBoth());
+ }
+ }
+
+ /**
+ * Close the relay descriptor database connection.
+ */
+ public void closeConnection() {
+
+ /* Log stats about imported descriptors. */
+ this.logger.info(String.format("Finished importing relay "
+ + "descriptors: %d conn-bi-direct stats lines", rbsCount));
+
+ /* Commit any stragglers before closing. */
+ if (this.conn != null) {
+ try {
+ this.conn.commit();
+ } catch (SQLException e) {
+ this.logger.log(Level.WARNING, "Could not commit final records "
+ + "to database", e);
+ }
+ try {
+ this.conn.close();
+ } catch (SQLException e) {
+ this.logger.log(Level.WARNING, "Could not close database "
+ + "connection.", e);
+ }
+ }
+
+ /* Close raw import files. */
+ try {
+ if (this.connBiDirectOut != null) {
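+        /* Write the end-of-data marker that PostgreSQL expects at the
+         * end of a COPY block before closing the file. */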
+ this.connBiDirectOut.write("\\.\n");
+ this.connBiDirectOut.close();
+ }
+ } catch (IOException e) {
+ this.logger.log(Level.WARNING, "Could not close one or more raw "
+ + "database import files.", e);
+ }
+ }
+}
diff --git a/src/org/torproject/ernie/cron/performance/TorperfProcessor.java b/src/org/torproject/ernie/cron/performance/TorperfProcessor.java
new file mode 100644
index 0000000..d7322db
--- /dev/null
+++ b/src/org/torproject/ernie/cron/performance/TorperfProcessor.java
@@ -0,0 +1,374 @@
+/* Copyright 2011, 2012 The Tor Project
+ * See LICENSE for licensing information */
+package org.torproject.ernie.cron.performance;
+
+import java.io.BufferedReader;
+import java.io.BufferedWriter;
+import java.io.File;
+import java.io.FileReader;
+import java.io.FileWriter;
+import java.io.IOException;
+import java.sql.Connection;
+import java.sql.DriverManager;
+import java.sql.PreparedStatement;
+import java.sql.ResultSet;
+import java.sql.SQLException;
+import java.sql.Statement;
+import java.text.SimpleDateFormat;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import java.util.SortedMap;
+import java.util.TimeZone;
+import java.util.TreeMap;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+
+import org.torproject.descriptor.Descriptor;
+import org.torproject.descriptor.DescriptorFile;
+import org.torproject.descriptor.DescriptorReader;
+import org.torproject.descriptor.DescriptorSourceFactory;
+import org.torproject.descriptor.TorperfResult;
+
+public class TorperfProcessor {
+ public TorperfProcessor(File torperfDirectory, File statsDirectory,
+ String connectionURL) {
+
+ if (torperfDirectory == null || statsDirectory == null) {
+ throw new IllegalArgumentException();
+ }
+
+ Logger logger = Logger.getLogger(TorperfProcessor.class.getName());
+ File rawFile = new File(statsDirectory, "torperf-raw");
+ File statsFile = new File(statsDirectory, "torperf-stats");
+ SortedMap<String, String> rawObs = new TreeMap<String, String>();
+ SortedMap<String, String> stats = new TreeMap<String, String>();
+ int addedRawObs = 0;
+ SimpleDateFormat formatter =
+ new SimpleDateFormat("yyyy-MM-dd,HH:mm:ss");
+ formatter.setTimeZone(TimeZone.getTimeZone("UTC"));
+ try {
+ if (rawFile.exists()) {
+ logger.fine("Reading file " + rawFile.getAbsolutePath() + "...");
+ BufferedReader br = new BufferedReader(new FileReader(rawFile));
+ String line = br.readLine(); // ignore header
+ while ((line = br.readLine()) != null) {
+ if (line.split(",").length != 4) {
+ logger.warning("Corrupt line in " + rawFile.getAbsolutePath()
+ + "!");
+ break;
+ }
+ String key = line.substring(0, line.lastIndexOf(","));
+ rawObs.put(key, line);
+ }
+ br.close();
+ logger.fine("Finished reading file " + rawFile.getAbsolutePath()
+ + ".");
+ }
+ if (statsFile.exists()) {
+ logger.fine("Reading file " + statsFile.getAbsolutePath()
+ + "...");
+ BufferedReader br = new BufferedReader(new FileReader(statsFile));
+ String line = br.readLine(); // ignore header
+ while ((line = br.readLine()) != null) {
+ String key = line.split(",")[0] + "," + line.split(",")[1];
+ stats.put(key, line);
+ }
+ br.close();
+ logger.fine("Finished reading file " + statsFile.getAbsolutePath()
+ + ".");
+ }
+ if (torperfDirectory.exists()) {
+ logger.fine("Importing files in " + torperfDirectory + "/...");
+ DescriptorReader descriptorReader =
+ DescriptorSourceFactory.createDescriptorReader();
+ descriptorReader.addDirectory(torperfDirectory);
+ descriptorReader.setExcludeFiles(new File(statsDirectory,
+ "torperf-history"));
+ Iterator<DescriptorFile> descriptorFiles =
+ descriptorReader.readDescriptors();
+ while (descriptorFiles.hasNext()) {
+ DescriptorFile descriptorFile = descriptorFiles.next();
+ if (descriptorFile.getException() != null) {
+ logger.log(Level.FINE, "Error parsing file.",
+ descriptorFile.getException());
+ continue;
+ }
+ for (Descriptor descriptor : descriptorFile.getDescriptors()) {
+ if (!(descriptor instanceof TorperfResult)) {
+ continue;
+ }
+ TorperfResult result = (TorperfResult) descriptor;
+ String source = result.getSource();
+ long fileSize = result.getFileSize();
+ if (fileSize == 51200) {
+ source += "-50kb";
+ } else if (fileSize == 1048576) {
+ source += "-1mb";
+ } else if (fileSize == 5242880) {
+ source += "-5mb";
+ } else {
+ logger.fine("Unexpected file size '" + fileSize
+ + "'. Skipping.");
+ continue;
+ }
+ String dateTime = formatter.format(result.getStartMillis());
+ long completeMillis = result.getDataCompleteMillis()
+ - result.getStartMillis();
+ String key = source + "," + dateTime;
+ String value = key;
+ if ((result.didTimeout() == null &&
+ result.getDataCompleteMillis() < 1) ||
+ (result.didTimeout() != null && result.didTimeout())) {
+ value += ",-2"; // -2 for timeout
+ } else if (result.getReadBytes() < fileSize) {
+ value += ",-1"; // -1 for failure
+ } else {
+ value += "," + completeMillis;
+ }
+ if (!rawObs.containsKey(key)) {
+ rawObs.put(key, value);
+ addedRawObs++;
+ }
+ }
+ }
+ logger.fine("Finished importing files in " + torperfDirectory
+ + "/.");
+ }
+ if (rawObs.size() > 0) {
+ logger.fine("Writing file " + rawFile.getAbsolutePath() + "...");
+ rawFile.getParentFile().mkdirs();
+ BufferedWriter bw = new BufferedWriter(new FileWriter(rawFile));
+ bw.append("source,date,start,completemillis\n");
+ String tempSourceDate = null;
+ Iterator<Map.Entry<String, String>> it =
+ rawObs.entrySet().iterator();
+ List<Long> dlTimes = new ArrayList<Long>();
+ boolean haveWrittenFinalLine = false;
+ SortedMap<String, List<Long>> dlTimesAllSources =
+ new TreeMap<String, List<Long>>();
+ SortedMap<String, long[]> statusesAllSources =
+ new TreeMap<String, long[]>();
+ long failures = 0, timeouts = 0, requests = 0;
+ while (it.hasNext() || !haveWrittenFinalLine) {
+ Map.Entry<String, String> next = it.hasNext() ? it.next() : null;
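+          /* A change of the source/date key (or the end of the input)
+           * closes the current group: compute quartiles if we have at
+           * least five download times, and fold the group into the
+           * all-sources aggregates. */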
+ if (tempSourceDate != null
+ && (next == null || !(next.getValue().split(",")[0] + ","
+ + next.getValue().split(",")[1]).equals(tempSourceDate))) {
+ if (dlTimes.size() > 4) {
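+              /* Elements at indices n/4-1, n/2-1, and 3n/4-1 of the
+               * sorted list approximate the first quartile, median,
+               * and third quartile. */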
+ Collections.sort(dlTimes);
+ long q1 = dlTimes.get(dlTimes.size() / 4 - 1);
+ long md = dlTimes.get(dlTimes.size() / 2 - 1);
+ long q3 = dlTimes.get(dlTimes.size() * 3 / 4 - 1);
+ stats.put(tempSourceDate, tempSourceDate + "," + q1 + ","
+ + md + "," + q3 + "," + timeouts + "," + failures + ","
+ + requests);
+ String allSourceDate = "all" + tempSourceDate.substring(
+ tempSourceDate.indexOf("-"));
+ if (dlTimesAllSources.containsKey(allSourceDate)) {
+ dlTimesAllSources.get(allSourceDate).addAll(dlTimes);
+ } else {
+ dlTimesAllSources.put(allSourceDate, dlTimes);
+ }
+ if (statusesAllSources.containsKey(allSourceDate)) {
+ long[] status = statusesAllSources.get(allSourceDate);
+ status[0] += timeouts;
+ status[1] += failures;
+ status[2] += requests;
+ } else {
+ long[] status = new long[3];
+ status[0] = timeouts;
+ status[1] = failures;
+ status[2] = requests;
+ statusesAllSources.put(allSourceDate, status);
+ }
+ }
+ dlTimes = new ArrayList<Long>();
+ failures = timeouts = requests = 0;
+ if (next == null) {
+ haveWrittenFinalLine = true;
+ }
+ }
+ if (next != null) {
+ bw.append(next.getValue() + "\n");
+ String[] parts = next.getValue().split(",");
+ tempSourceDate = parts[0] + "," + parts[1];
+ long completeMillis = Long.parseLong(parts[3]);
+ if (completeMillis == -2L) {
+ timeouts++;
+ } else if (completeMillis == -1L) {
+ failures++;
+ } else {
+ dlTimes.add(Long.parseLong(parts[3]));
+ }
+ requests++;
+ }
+ }
+ bw.close();
+ for (Map.Entry<String, List<Long>> e :
+ dlTimesAllSources.entrySet()) {
+ String allSourceDate = e.getKey();
+ dlTimes = e.getValue();
+ Collections.sort(dlTimes);
+ long q1 = dlTimes.get(dlTimes.size() / 4 - 1);
+ long md = dlTimes.get(dlTimes.size() / 2 - 1);
+ long q3 = dlTimes.get(dlTimes.size() * 3 / 4 - 1);
+ long[] status = statusesAllSources.get(allSourceDate);
+ timeouts = status[0];
+ failures = status[1];
+ requests = status[2];
+ stats.put(allSourceDate, allSourceDate + "," + q1 + "," + md
+ + "," + q3 + "," + timeouts + "," + failures + ","
+ + requests);
+ }
+ logger.fine("Finished writing file " + rawFile.getAbsolutePath()
+ + ".");
+ }
+ if (stats.size() > 0) {
+ logger.fine("Writing file " + statsFile.getAbsolutePath()
+ + "...");
+ statsFile.getParentFile().mkdirs();
+ BufferedWriter bw = new BufferedWriter(new FileWriter(statsFile));
+ bw.append("source,date,q1,md,q3,timeouts,failures,requests\n");
+ for (String s : stats.values()) {
+ bw.append(s + "\n");
+ }
+ bw.close();
+ logger.fine("Finished writing file " + statsFile.getAbsolutePath()
+ + ".");
+ }
+ } catch (IOException e) {
+ logger.log(Level.WARNING, "Failed writing "
+ + rawFile.getAbsolutePath() + " or "
+ + statsFile.getAbsolutePath() + "!", e);
+ }
+
+ /* Write stats. */
+ StringBuilder dumpStats = new StringBuilder("Finished writing "
+ + "statistics on torperf results.\nAdded " + addedRawObs
+ + " new observations in this execution.\n"
+ + "Last known obserations by source and file size are:");
+ String lastSource = null;
+ String lastLine = null;
+ for (String s : rawObs.keySet()) {
+ String[] parts = s.split(",");
+ if (lastSource == null) {
+ lastSource = parts[0];
+ } else if (!parts[0].equals(lastSource)) {
+ String lastKnownObservation = lastLine.split(",")[1] + " "
+ + lastLine.split(",")[2];
+ dumpStats.append("\n" + lastSource + " " + lastKnownObservation);
+ lastSource = parts[0];
+ }
+ lastLine = s;
+ }
+ if (lastSource != null) {
+ String lastKnownObservation = lastLine.split(",")[1] + " "
+ + lastLine.split(",")[2];
+ dumpStats.append("\n" + lastSource + " " + lastKnownObservation);
+ }
+ logger.info(dumpStats.toString());
+
+ /* Write results to database. */
+ if (connectionURL != null) {
+ try {
+ Map<String, String> insertRows = new HashMap<String, String>();
+ insertRows.putAll(stats);
+ Set<String> updateRows = new HashSet<String>();
+ Connection conn = DriverManager.getConnection(connectionURL);
+ conn.setAutoCommit(false);
+ Statement statement = conn.createStatement();
+ ResultSet rs = statement.executeQuery(
+ "SELECT date, source, q1, md, q3, timeouts, failures, "
+ + "requests FROM torperf_stats");
+ while (rs.next()) {
+ String date = rs.getDate(1).toString();
+ String source = rs.getString(2);
+ String key = source + "," + date;
+ if (insertRows.containsKey(key)) {
+ String insertRow = insertRows.remove(key);
+ String[] newStats = insertRow.split(",");
+ long newQ1 = Long.parseLong(newStats[2]);
+ long newMd = Long.parseLong(newStats[3]);
+ long newQ3 = Long.parseLong(newStats[4]);
+ long newTimeouts = Long.parseLong(newStats[5]);
+ long newFailures = Long.parseLong(newStats[6]);
+ long newRequests = Long.parseLong(newStats[7]);
+ long oldQ1 = rs.getLong(3);
+ long oldMd = rs.getLong(4);
+ long oldQ3 = rs.getLong(5);
+ long oldTimeouts = rs.getLong(6);
+ long oldFailures = rs.getLong(7);
+ long oldRequests = rs.getLong(8);
+ if (newQ1 != oldQ1 || newMd != oldMd || newQ3 != oldQ3 ||
+ newTimeouts != oldTimeouts ||
+ newFailures != oldFailures ||
+ newRequests != oldRequests) {
+ updateRows.add(insertRow);
+ }
+ }
+ }
+ PreparedStatement psU = conn.prepareStatement(
+ "UPDATE torperf_stats SET q1 = ?, md = ?, q3 = ?, "
+ + "timeouts = ?, failures = ?, requests = ? "
+ + "WHERE date = ? AND source = ?");
+ for (String row : updateRows) {
+ String[] newStats = row.split(",");
+ String source = newStats[0];
+ java.sql.Date date = java.sql.Date.valueOf(newStats[1]);
+ long q1 = Long.parseLong(newStats[2]);
+ long md = Long.parseLong(newStats[3]);
+ long q3 = Long.parseLong(newStats[4]);
+ long timeouts = Long.parseLong(newStats[5]);
+ long failures = Long.parseLong(newStats[6]);
+ long requests = Long.parseLong(newStats[7]);
+ psU.clearParameters();
+ psU.setLong(1, q1);
+ psU.setLong(2, md);
+ psU.setLong(3, q3);
+ psU.setLong(4, timeouts);
+ psU.setLong(5, failures);
+ psU.setLong(6, requests);
+ psU.setDate(7, date);
+ psU.setString(8, source);
+ psU.executeUpdate();
+ }
+ PreparedStatement psI = conn.prepareStatement(
+ "INSERT INTO torperf_stats (q1, md, q3, timeouts, failures, "
+ + "requests, date, source) VALUES (?, ?, ?, ?, ?, ?, ?, ?)");
+ for (String row : insertRows.values()) {
+ String[] newStats = row.split(",");
+ String source = newStats[0];
+ java.sql.Date date = java.sql.Date.valueOf(newStats[1]);
+ long q1 = Long.parseLong(newStats[2]);
+ long md = Long.parseLong(newStats[3]);
+ long q3 = Long.parseLong(newStats[4]);
+ long timeouts = Long.parseLong(newStats[5]);
+ long failures = Long.parseLong(newStats[6]);
+ long requests = Long.parseLong(newStats[7]);
+ psI.clearParameters();
+ psI.setLong(1, q1);
+ psI.setLong(2, md);
+ psI.setLong(3, q3);
+ psI.setLong(4, timeouts);
+ psI.setLong(5, failures);
+ psI.setLong(6, requests);
+ psI.setDate(7, date);
+ psI.setString(8, source);
+ psI.executeUpdate();
+ }
+ conn.commit();
+ conn.close();
+ } catch (SQLException e) {
+ logger.log(Level.WARNING, "Failed to add torperf stats to "
+ + "database.", e);
+ }
+ }
+ }
+}
+
diff --git a/src/org/torproject/ernie/cron/users/BridgeStatsFileHandler.java b/src/org/torproject/ernie/cron/users/BridgeStatsFileHandler.java
new file mode 100644
index 0000000..14795e1
--- /dev/null
+++ b/src/org/torproject/ernie/cron/users/BridgeStatsFileHandler.java
@@ -0,0 +1,718 @@
+/* Copyright 2011, 2012 The Tor Project
+ * See LICENSE for licensing information */
+package org.torproject.ernie.cron.users;
+
+import java.io.BufferedReader;
+import java.io.BufferedWriter;
+import java.io.File;
+import java.io.FileReader;
+import java.io.FileWriter;
+import java.io.IOException;
+import java.sql.Connection;
+import java.sql.DriverManager;
+import java.sql.PreparedStatement;
+import java.sql.ResultSet;
+import java.sql.SQLException;
+import java.sql.Statement;
+import java.text.ParseException;
+import java.text.SimpleDateFormat;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+import java.util.SortedMap;
+import java.util.SortedSet;
+import java.util.TimeZone;
+import java.util.TreeMap;
+import java.util.TreeSet;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+
+import org.apache.commons.codec.DecoderException;
+import org.apache.commons.codec.binary.Hex;
+import org.apache.commons.codec.digest.DigestUtils;
+import org.torproject.descriptor.Descriptor;
+import org.torproject.descriptor.DescriptorFile;
+import org.torproject.descriptor.DescriptorReader;
+import org.torproject.descriptor.DescriptorSourceFactory;
+import org.torproject.descriptor.ExtraInfoDescriptor;
+import org.torproject.descriptor.NetworkStatusEntry;
+import org.torproject.descriptor.RelayNetworkStatusConsensus;
+import org.torproject.descriptor.ServerDescriptor;
+
+/**
+ * Determines estimates of bridge users per country and day from the
+ * extra-info descriptors that bridges publish. In a first step, the
+ * number of unique IP addresses that bridges see is normalized to a
+ * 24-hour period. In the next step, all bridges that have been
+ * running as relays are excluded. Finally, observations are summed up and
+ * written to <code>stats/bridge-stats</code>.
+ */
+public class BridgeStatsFileHandler {
+
+ /**
+ * Two-letter country codes of known countries.
+ */
+ private SortedSet<String> countries;
+
+ /**
+ * Intermediate results file containing bridge user numbers by country
+ * as seen by single bridges, normalized to 24-hour periods.
+ */
+ private File bridgeStatsRawFile;
+
+ /**
+ * Temp file for writing intermediate results.
+ */
+ private File bridgeStatsRawTempFile;
+
+ /**
+ * Bridge user numbers by country as seen by single bridges on a given
+ * day. Map keys are bridge and date written as "bridge,date", map
+ * values are lines as read from <code>stats/bridge-stats-raw</code>.
+ */
+ private SortedMap<String, Map<String, String>> bridgeUsersRaw;
+
+ /**
+ * Helper file containing the hashed relay identities of all known
+ * relays. These hashes are compared to the bridge identity hashes to
+ * exclude bridges that have been known as relays from the statistics.
+ */
+ private File hashedRelayIdentitiesFile;
+
+ /**
+ * Known hashed relay identities used to exclude bridges that have been
+ * running as relays.
+ */
+ private SortedSet<String> hashedRelays;
+
+ /**
+ * Helper file containing extra-info descriptors published by 0.2.2.x
+ * bridges. If these descriptors contain geoip-stats, they are not
+ * included in the results, because stats are very likely broken.
+ */
+ private File zeroTwoTwoDescriptorsFile;
+
+ /**
+ * Extra-info descriptors published by 0.2.2.x bridges. If these
+ * descriptors contain geoip-stats, they are not included in the
+   * results, because their stats are very likely broken.
+ */
+ private SortedSet<String> zeroTwoTwoDescriptors;
+
+ /**
+ * Final results file containing the number of bridge users per country
+ * and day. This file is not read in during initialization, but
+ * overwritten at the end of the execution.
+ */
+ private File bridgeStatsFile;
+
+ /**
+ * Logger for this class.
+ */
+ private Logger logger;
+
+ /* Database connection string. */
+ private String connectionURL = null;
+
+ private SimpleDateFormat dateTimeFormat;
+
+ private File bridgesDir;
+
+ private File statsDirectory;
+
+ private boolean keepBridgeDescriptorImportHistory;
+
+ private File archivesDirectory;
+
+ private boolean keepRelayDescriptorImportHistory;
+
+ /**
+ * Initializes this class, including reading in intermediate results
+ * files <code>stats/bridge-stats-raw</code> and
+ * <code>stats/hashed-relay-identities</code>.
+ */
+ public BridgeStatsFileHandler(String connectionURL,
+ File bridgesDir, File statsDirectory,
+ boolean keepBridgeDescriptorImportHistory, File archivesDirectory,
+ boolean keepRelayDescriptorImportHistory) {
+
+ if (bridgesDir == null || statsDirectory == null ||
+        archivesDirectory == null) {
+ throw new IllegalArgumentException();
+ }
+ this.bridgesDir = bridgesDir;
+ this.statsDirectory = statsDirectory;
+ this.keepBridgeDescriptorImportHistory =
+ keepBridgeDescriptorImportHistory;
+ this.archivesDirectory = archivesDirectory;
+ this.keepRelayDescriptorImportHistory =
+ keepRelayDescriptorImportHistory;
+
+ /* Initialize set of known countries. */
+ this.countries = new TreeSet<String>();
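+    /* The pseudo country code "zy" stands for the total across all
+     * countries; it corresponds to the "all" column in file
+     * headers. */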
+ this.countries.add("zy");
+
+ /* Initialize local data structures to hold results. */
+ this.bridgeUsersRaw = new TreeMap<String, Map<String, String>>();
+ this.hashedRelays = new TreeSet<String>();
+ this.zeroTwoTwoDescriptors = new TreeSet<String>();
+
+ /* Initialize file names for intermediate and final results. */
+ this.bridgeStatsRawFile = new File("stats/bridge-stats-raw");
+ this.bridgeStatsRawTempFile = new File("stats/bridge-stats-raw.tmp");
+ this.bridgeStatsFile = new File("stats/bridge-stats");
+ this.hashedRelayIdentitiesFile = new File(
+ "stats/hashed-relay-identities");
+ this.zeroTwoTwoDescriptorsFile = new File(
+ "stats/v022-bridge-descriptors");
+
+ /* Initialize database connection string. */
+ this.connectionURL = connectionURL;
+
+ this.dateTimeFormat = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
+ this.dateTimeFormat.setTimeZone(TimeZone.getTimeZone("UTC"));
+
+ /* Initialize logger. */
+ this.logger = Logger.getLogger(
+ BridgeStatsFileHandler.class.getName());
+
+ /* Read in bridge user numbers by country as seen by single bridges,
+ * normalized to 24-hour periods. */
+ if (this.bridgeStatsRawFile.exists()) {
+ try {
+ this.logger.fine("Reading file "
+ + this.bridgeStatsRawFile.getAbsolutePath() + "...");
+ BufferedReader br = new BufferedReader(new FileReader(
+ this.bridgeStatsRawFile));
+ String line = br.readLine();
+ if (line != null) {
+ /* The first line should contain headers that we need to parse
+ * in order to learn what countries we were interested in when
+ * writing this file. */
+ if (!line.startsWith("bridge,date,time,")) {
+ this.logger.warning("Incorrect first line '" + line + "' in "
+ + this.bridgeStatsRawFile.getAbsolutePath() + "! This line "
+ + "should contain headers! Aborting to read in this "
+ + "file!");
+ } else {
+ String[] headers = line.split(",");
+ for (int i = 3; i < headers.length; i++) {
+ if (!headers[i].equals("all")) {
+ this.countries.add(headers[i]);
+ }
+ }
+ /* Read in the rest of the file. */
+ while ((line = br.readLine()) != null) {
+ String[] parts = line.split(",");
+ if (parts.length != headers.length) {
+ this.logger.warning("Corrupt line '" + line + "' in file "
+ + this.bridgeStatsRawFile.getAbsolutePath()
+ + "! Aborting to read this file!");
+ break;
+ }
+ String hashedBridgeIdentity = parts[0];
+ String date = parts[1];
+ String time = parts[2];
+ SortedMap<String, String> obs =
+ new TreeMap<String, String>();
+ for (int i = 3; i < parts.length; i++) {
+ if (parts[i].equals("NA")) {
+ continue;
+ }
+ if (headers[i].equals("all")) {
+ obs.put("zy", parts[i]);
+ } else {
+ obs.put(headers[i], parts[i]);
+ }
+ }
+ long dateTimeMillis = dateTimeFormat.parse(date + " "
+ + time).getTime();
+ this.addObs(hashedBridgeIdentity, dateTimeMillis, obs);
+ }
+ }
+ }
+ br.close();
+ this.logger.fine("Finished reading file "
+ + this.bridgeStatsRawFile.getAbsolutePath() + ".");
+ } catch (IOException e) {
+ this.logger.log(Level.WARNING, "Failed to read file "
+ + this.bridgeStatsRawFile.getAbsolutePath() + "!", e);
+ } catch (ParseException e) {
+ this.logger.log(Level.WARNING, "Failed to read file "
+ + this.bridgeStatsRawFile.getAbsolutePath() + "!", e);
+ }
+ }
+
+ /* Read in known hashed relay identities used to exclude bridges that
+ * have been running as relays. */
+ if (this.hashedRelayIdentitiesFile.exists()) {
+ try {
+ this.logger.fine("Reading file "
+ + this.hashedRelayIdentitiesFile.getAbsolutePath() + "...");
+ BufferedReader br = new BufferedReader(new FileReader(
+ this.hashedRelayIdentitiesFile));
+ String line = null;
+ /* Read in all lines from the file and memorize them. */
+ while ((line = br.readLine()) != null) {
+ this.hashedRelays.add(line);
+ }
+ br.close();
+ this.logger.fine("Finished reading file "
+ + this.hashedRelayIdentitiesFile.getAbsolutePath() + ".");
+ } catch (IOException e) {
+ this.logger.log(Level.WARNING, "Failed to read file "
+ + this.hashedRelayIdentitiesFile.getAbsolutePath() + "!", e);
+ }
+ }
+
+ /* Read in known extra-info descriptors published by 0.2.2.x
+ * bridges. */
+ if (this.zeroTwoTwoDescriptorsFile.exists()) {
+ try {
+ this.logger.fine("Reading file "
+ + this.zeroTwoTwoDescriptorsFile.getAbsolutePath() + "...");
+ BufferedReader br = new BufferedReader(new FileReader(
+ this.zeroTwoTwoDescriptorsFile));
+ String line = null;
+ /* Read in all lines from the file and memorize them. */
+ while ((line = br.readLine()) != null) {
+ this.zeroTwoTwoDescriptors.add(line);
+ }
+ br.close();
+ this.logger.fine("Finished reading file "
+ + this.zeroTwoTwoDescriptorsFile.getAbsolutePath() + ".");
+ } catch (IOException e) {
+ this.logger.log(Level.WARNING, "Failed to read file "
+ + this.zeroTwoTwoDescriptorsFile.getAbsolutePath() + "!", e);
+ }
+ }
+ }
+
+ /**
+ * Adds a hashed relay identity string to the list of bridges that we
+ * are going to ignore in the future. If we counted user numbers from
+ * bridges that have been running as relays, our numbers would be far
+ * higher than what we think is correct.
+ */
+ public void addHashedRelay(String hashedRelayIdentity) {
+ if (!this.hashedRelays.contains(hashedRelayIdentity)) {
+ this.logger.finer("Adding new hashed relay identity: "
+ + hashedRelayIdentity);
+ this.hashedRelays.add(hashedRelayIdentity);
+ }
+ }
+
+ /**
+   * Adds an extra-info descriptor identifier published by a 0.2.2.x
+   * bridge. If this extra-info descriptor contains geoip-stats, it is
+   * not included in the results, because its stats are very likely
+   * broken.
+ */
+ public void addZeroTwoTwoDescriptor(String hashedBridgeIdentity,
+ long publishedMillis) {
+ String value = hashedBridgeIdentity.toUpperCase() + ","
+ + this.dateTimeFormat.format(publishedMillis).
+ replaceAll(" ", ",");
+ if (!this.zeroTwoTwoDescriptors.contains(value)) {
+ this.logger.finer("Adding new bridge 0.2.2.x extra-info "
+ + "descriptor: " + value);
+ this.zeroTwoTwoDescriptors.add(value);
+ }
+ }
+
+ /**
+ * Returns whether the given fingerprint is a known hashed relay
+ * identity. <code>BridgeDescriptorParser</code> uses this information
+   * to decide whether to continue parsing a bridge extra-info
+   * descriptor or not.
+ */
+ public boolean isKnownRelay(String hashedBridgeIdentity) {
+ return this.hashedRelays.contains(hashedBridgeIdentity);
+ }
+
+ /**
+ * Adds bridge user numbers by country as seen by a single bridge on a
+ * given date and time. Bridges can publish statistics on unique IP
+ * addresses multiple times a day, but we only want to include one
+ * observation per day. If we already have an observation from the given
+ * bridge and day, we keep the one with the later publication time and
+ * discard the other one.
+ */
+ public void addObs(String hashedIdentity, long publishedMillis,
+ Map<String, String> obs) {
+ for (String country : obs.keySet()) {
+ this.countries.add(country);
+ }
+ String dateTime = this.dateTimeFormat.format(publishedMillis);
+ String date = dateTime.split(" ")[0];
+ String time = dateTime.split(" ")[1];
+ String shortKey = hashedIdentity + "," + date;
+ String longKey = shortKey + "," + time;
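+    /* Look up the first existing key at or after "bridge,date"; if it
+     * starts with the same bridge and date, we already have an
+     * observation for that bridge and day. */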
+ SortedMap<String, Map<String, String>> tailMap =
+ this.bridgeUsersRaw.tailMap(shortKey);
+ String nextKey = tailMap.isEmpty() ? null : tailMap.firstKey();
+ if (nextKey == null || !nextKey.startsWith(shortKey)) {
+ this.logger.finer("Adding new bridge user numbers for key "
+ + longKey);
+ this.bridgeUsersRaw.put(longKey, obs);
+ } else if (longKey.compareTo(nextKey) > 0) {
+ this.logger.finer("Replacing existing bridge user numbers (" +
+ nextKey + " with new numbers: " + longKey);
+ this.bridgeUsersRaw.put(longKey, obs);
+ } else {
+ this.logger.finer("Not replacing existing bridge user numbers (" +
+ nextKey + " with new numbers (" + longKey + ").");
+ }
+ }
+
+ public void importSanitizedBridges() {
+ if (bridgesDir.exists()) {
+ logger.fine("Importing files in directory " + bridgesDir + "/...");
+ DescriptorReader reader =
+ DescriptorSourceFactory.createDescriptorReader();
+ reader.addDirectory(bridgesDir);
+ if (keepBridgeDescriptorImportHistory) {
+ reader.setExcludeFiles(new File(statsDirectory,
+ "bridge-stats-bridge-descriptor-history"));
+ }
+ Iterator<DescriptorFile> descriptorFiles = reader.readDescriptors();
+ while (descriptorFiles.hasNext()) {
+ DescriptorFile descriptorFile = descriptorFiles.next();
+ if (descriptorFile.getDescriptors() != null) {
+ for (Descriptor descriptor : descriptorFile.getDescriptors()) {
+ if (descriptor instanceof ServerDescriptor) {
+ this.addServerDescriptor((ServerDescriptor) descriptor);
+ } else if (descriptor instanceof ExtraInfoDescriptor) {
+ this.addExtraInfoDescriptor(
+ (ExtraInfoDescriptor) descriptor);
+ }
+ }
+ }
+ }
+ logger.info("Finished importing bridge descriptors.");
+ }
+ }
+
+ private void addServerDescriptor(ServerDescriptor descriptor) {
+ if (descriptor.getPlatform() != null &&
+ descriptor.getPlatform().startsWith("Tor 0.2.2")) {
+ this.addZeroTwoTwoDescriptor(descriptor.getFingerprint(),
+ descriptor.getPublishedMillis());
+ }
+ }
+
+ private void addExtraInfoDescriptor(ExtraInfoDescriptor descriptor) {
+ if (!this.isKnownRelay(descriptor.getFingerprint())) {
+ if (descriptor.getGeoipStartTimeMillis() >= 0 &&
+ descriptor.getGeoipClientOrigins() != null) {
+ long seconds = (descriptor.getPublishedMillis()
+ - descriptor.getGeoipStartTimeMillis()) / 1000L;
+ double allUsers = 0.0D;
+ Map<String, String> obs = new HashMap<String, String>();
+ for (Map.Entry<String, Integer> e :
+ descriptor.getGeoipClientOrigins().entrySet()) {
+ String country = e.getKey();
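+          /* Bridges round reported counts up to multiples of 8, so
+           * subtracting 4 recenters on the bin midpoint; the factor
+           * 86400/seconds scales the count to a 24-hour period. */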
+ double users = ((double) e.getValue() - 4) * 86400.0D
+ / ((double) seconds);
+ allUsers += users;
+ obs.put(country, String.format("%.2f", users));
+ }
+ obs.put("zy", String.format("%.2f", allUsers));
+ this.addObs(descriptor.getFingerprint(),
+ descriptor.getPublishedMillis(), obs);
+ }
+ if (descriptor.getBridgeStatsEndMillis() >= 0 &&
+ descriptor.getBridgeIps() != null) {
+ double allUsers = 0.0D;
+ Map<String, String> obs = new HashMap<String, String>();
+ for (Map.Entry<String, Integer> e :
+ descriptor.getBridgeIps().entrySet()) {
+ String country = e.getKey();
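+          /* These counts presumably cover a 24-hour interval already,
+           * so only the rounding offset of 4 is subtracted. */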
+ double users = (double) e.getValue() - 4;
+ allUsers += users;
+ obs.put(country, String.format("%.2f", users));
+ }
+ obs.put("zy", String.format("%.2f", allUsers));
+ this.addObs(descriptor.getFingerprint(),
+ descriptor.getBridgeStatsEndMillis(), obs);
+ }
+ }
+ }
+
+ public void importRelayDescriptors() {
+ if (archivesDirectory.exists()) {
+ logger.fine("Importing files in directory " + archivesDirectory
+ + "/...");
+ DescriptorReader reader =
+ DescriptorSourceFactory.createDescriptorReader();
+ reader.addDirectory(archivesDirectory);
+ if (keepRelayDescriptorImportHistory) {
+ reader.setExcludeFiles(new File(statsDirectory,
+ "bridge-stats-relay-descriptor-history"));
+ }
+ Iterator<DescriptorFile> descriptorFiles = reader.readDescriptors();
+ while (descriptorFiles.hasNext()) {
+ DescriptorFile descriptorFile = descriptorFiles.next();
+ if (descriptorFile.getDescriptors() != null) {
+ for (Descriptor descriptor : descriptorFile.getDescriptors()) {
+ if (descriptor instanceof RelayNetworkStatusConsensus) {
+ this.addRelayNetworkStatusConsensus(
+ (RelayNetworkStatusConsensus) descriptor);
+ }
+ }
+ }
+ }
+ }
+
+ logger.info("Finished importing relay descriptors.");
+ }
+
+ private void addRelayNetworkStatusConsensus(
+ RelayNetworkStatusConsensus consensus) {
+ for (NetworkStatusEntry statusEntry :
+ consensus.getStatusEntries().values()) {
+ try {
+ this.addHashedRelay(DigestUtils.shaHex(Hex.decodeHex(
+ statusEntry.getFingerprint().toCharArray())).toUpperCase());
+ } catch (DecoderException e) {
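+        /* Skip status entries whose fingerprint cannot be decoded. */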
+ }
+ }
+ }
+
+ /**
+ * Writes the list of hashed relay identities and bridge user numbers as
+ * observed by single bridges to disk, aggregates per-day statistics for
+ * all bridges, and writes those to disk, too.
+ */
+ public void writeFiles() {
+
+ /* Write hashed relay identities to disk. */
+ try {
+ this.logger.fine("Writing file "
+ + this.hashedRelayIdentitiesFile.getAbsolutePath() + "...");
+ this.hashedRelayIdentitiesFile.getParentFile().mkdirs();
+ BufferedWriter bw = new BufferedWriter(new FileWriter(
+ this.hashedRelayIdentitiesFile));
+ for (String hashedRelay : this.hashedRelays) {
+ bw.append(hashedRelay + "\n");
+ }
+ bw.close();
+ this.logger.fine("Finished writing file "
+ + this.hashedRelayIdentitiesFile.getAbsolutePath() + ".");
+ } catch (IOException e) {
+ this.logger.log(Level.WARNING, "Failed to write "
+ + this.hashedRelayIdentitiesFile.getAbsolutePath() + "!", e);
+ }
+
+ /* Write bridge extra-info descriptor identifiers to disk. */
+ try {
+ this.logger.fine("Writing file "
+ + this.zeroTwoTwoDescriptorsFile.getAbsolutePath() + "...");
+ this.zeroTwoTwoDescriptorsFile.getParentFile().mkdirs();
+ BufferedWriter bw = new BufferedWriter(new FileWriter(
+ this.zeroTwoTwoDescriptorsFile));
+ for (String descriptorIdentifier : this.zeroTwoTwoDescriptors) {
+ bw.append(descriptorIdentifier + "\n");
+ }
+ bw.close();
+ this.logger.fine("Finished writing file "
+ + this.zeroTwoTwoDescriptorsFile.getAbsolutePath() + ".");
+ } catch (IOException e) {
+ this.logger.log(Level.WARNING, "Failed to write "
+ + this.zeroTwoTwoDescriptorsFile.getAbsolutePath() + "!", e);
+ }
+
+ /* Write observations made by single bridges to disk. */
+ try {
+ this.logger.fine("Writing file "
+ + this.bridgeStatsRawFile.getAbsolutePath() + " (using "
+ + this.bridgeStatsRawTempFile.getAbsolutePath() + " as temp "
+ + "file)...");
+ this.bridgeStatsRawTempFile.getParentFile().mkdirs();
+ BufferedWriter bw = new BufferedWriter(new FileWriter(
+ this.bridgeStatsRawTempFile));
+ bw.append("bridge,date,time");
+ for (String c : this.countries) {
+ if (c.equals("zy")) {
+ bw.append(",all");
+ } else {
+ bw.append("," + c);
+ }
+ }
+ bw.append("\n");
+ for (Map.Entry<String, Map<String, String>> e :
+ this.bridgeUsersRaw.entrySet()) {
+ String longKey = e.getKey();
+ String[] parts = longKey.split(",");
+ String hashedBridgeIdentity = parts[0];
+ if (!this.hashedRelays.contains(hashedBridgeIdentity) &&
+ !this.zeroTwoTwoDescriptors.contains(longKey)) {
+ Map<String, String> obs = e.getValue();
+ StringBuilder sb = new StringBuilder(longKey);
+ for (String c : this.countries) {
+ sb.append("," + (obs.containsKey(c) &&
+ !obs.get(c).startsWith("-") ? obs.get(c) : "NA"));
+ }
+ String line = sb.toString();
+ bw.append(line + "\n");
+ }
+ }
+ bw.close();
+ if (!this.bridgeStatsRawTempFile.renameTo(
+ this.bridgeStatsRawFile)) {
+ this.logger.fine("Failed to rename "
+ + this.bridgeStatsRawTempFile.getAbsolutePath() + " to "
+ + this.bridgeStatsRawFile.getAbsolutePath() + ".");
+ }
+ this.logger.fine("Finished writing file "
+ + this.bridgeStatsRawFile.getAbsolutePath() + ".");
+ } catch (IOException e) {
+ this.logger.log(Level.WARNING, "Failed to write "
+ + this.bridgeStatsRawFile.getAbsolutePath() + " (using "
+ + this.bridgeStatsRawTempFile.getAbsolutePath() + " as temp "
+ + "file)!", e);
+ }
+
+ /* Aggregate per-day statistics. */
+ SortedMap<String, double[]> bridgeUsersPerDay =
+ new TreeMap<String, double[]>();
+ for (Map.Entry<String, Map<String, String>> e :
+ this.bridgeUsersRaw.entrySet()) {
+ String longKey = e.getKey();
+ String[] parts = longKey.split(",");
+ String hashedBridgeIdentity = parts[0];
+ String date = parts[1];
+ if (!this.hashedRelays.contains(hashedBridgeIdentity) &&
+ !this.zeroTwoTwoDescriptors.contains(longKey)) {
+ double[] users = bridgeUsersPerDay.get(date);
+ Map<String, String> obs = e.getValue();
+ if (users == null) {
+ users = new double[this.countries.size()];
+ bridgeUsersPerDay.put(date, users);
+ }
+ int i = 0;
+ for (String c : this.countries) {
+ if (obs.containsKey(c) && !obs.get(c).startsWith("-")) {
+ users[i] += Double.parseDouble(obs.get(c));
+ }
+ i++;
+ }
+ }
+ }
+
+ /* Write final results of bridge users per day and country to
+ * <code>stats/bridge-stats</code>. */
+ try {
+ this.logger.fine("Writing file "
+          + this.bridgeStatsFile.getAbsolutePath() + "...");
+ this.bridgeStatsFile.getParentFile().mkdirs();
+ BufferedWriter bw = new BufferedWriter(new FileWriter(
+ this.bridgeStatsFile));
+ bw.append("date");
+ for (String c : this.countries) {
+ if (c.equals("zy")) {
+ bw.append(",all");
+ } else {
+ bw.append("," + c);
+ }
+ }
+ bw.append("\n");
+
+ /* Write current observation. */
+ for (Map.Entry<String, double[]> e : bridgeUsersPerDay.entrySet()) {
+ String date = e.getKey();
+ bw.append(date);
+ double[] users = e.getValue();
+ for (int i = 0; i < users.length; i++) {
+ bw.append("," + String.format("%.2f", users[i]));
+ }
+ bw.append("\n");
+ }
+ bw.close();
+ this.logger.fine("Finished writing file "
+ + this.bridgeStatsFile.getAbsolutePath() + ".");
+ } catch (IOException e) {
+ this.logger.log(Level.WARNING, "Failed to write "
+ + this.bridgeStatsFile.getAbsolutePath() + "!", e);
+ }
+
+ /* Add daily bridge users to database. */
+ if (connectionURL != null) {
+ try {
+ List<String> countryList = new ArrayList<String>();
+ for (String c : this.countries) {
+ countryList.add(c);
+ }
+ Map<String, Integer> insertRows = new HashMap<String, Integer>(),
+ updateRows = new HashMap<String, Integer>();
+ for (Map.Entry<String, double[]> e :
+ bridgeUsersPerDay.entrySet()) {
+ String date = e.getKey();
+ double[] users = e.getValue();
+ for (int i = 0; i < users.length; i++) {
+ int usersInt = (int) users[i];
+ if (usersInt < 1) {
+ continue;
+ }
+ String country = countryList.get(i);
+ String key = date + "," + country;
+ insertRows.put(key, usersInt);
+ }
+ }
+ Connection conn = DriverManager.getConnection(connectionURL);
+ conn.setAutoCommit(false);
+ Statement statement = conn.createStatement();
+ ResultSet rs = statement.executeQuery(
+ "SELECT date, country, users FROM bridge_stats");
+ while (rs.next()) {
+ String date = rs.getDate(1).toString();
+ String country = rs.getString(2);
+ String key = date + "," + country;
+ if (insertRows.containsKey(key)) {
+ int insertRow = insertRows.remove(key);
+ int oldUsers = rs.getInt(3);
+ if (oldUsers != insertRow) {
+ updateRows.put(key, insertRow);
+ }
+ }
+ }
+ rs.close();
+ PreparedStatement psU = conn.prepareStatement(
+ "UPDATE bridge_stats SET users = ? "
+ + "WHERE date = ? AND country = ?");
+ for (Map.Entry<String, Integer> e : updateRows.entrySet()) {
+ String[] keyParts = e.getKey().split(",");
+ java.sql.Date date = java.sql.Date.valueOf(keyParts[0]);
+ String country = keyParts[1];
+ int users = e.getValue();
+ psU.clearParameters();
+ psU.setInt(1, users);
+ psU.setDate(2, date);
+ psU.setString(3, country);
+ psU.executeUpdate();
+ }
+ PreparedStatement psI = conn.prepareStatement(
+ "INSERT INTO bridge_stats (users, date, country) "
+ + "VALUES (?, ?, ?)");
+ for (Map.Entry<String, Integer> e : insertRows.entrySet()) {
+ String[] keyParts = e.getKey().split(",");
+ java.sql.Date date = java.sql.Date.valueOf(keyParts[0]);
+ String country = keyParts[1];
+ int users = e.getValue();
+ psI.clearParameters();
+ psI.setInt(1, users);
+ psI.setDate(2, date);
+ psI.setString(3, country);
+ psI.executeUpdate();
+ }
+ conn.commit();
+ conn.close();
+ } catch (SQLException e) {
+ logger.log(Level.WARNING, "Failed to add daily bridge users to "
+ + "database.", e);
+ }
+ }
+ }
+}
+
diff --git a/src/org/torproject/ernie/status/doctor/ConsensusHealthServlet.java b/src/org/torproject/ernie/status/doctor/ConsensusHealthServlet.java
new file mode 100644
index 0000000..330708f
--- /dev/null
+++ b/src/org/torproject/ernie/status/doctor/ConsensusHealthServlet.java
@@ -0,0 +1,57 @@
+/* Copyright 2011, 2012 The Tor Project
+ * See LICENSE for licensing information */
+package org.torproject.ernie.status.doctor;
+
+import java.io.BufferedInputStream;
+import java.io.BufferedOutputStream;
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.IOException;
+
+import javax.servlet.ServletException;
+import javax.servlet.http.HttpServlet;
+import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpServletResponse;
+
+public class ConsensusHealthServlet extends HttpServlet {
+
+ private static final long serialVersionUID = -5230032733057814869L;
+
+ public void doGet(HttpServletRequest request,
+ HttpServletResponse response) throws IOException,
+ ServletException {
+
+ /* Read file from disk and write it to response. */
+ BufferedInputStream input = null;
+ BufferedOutputStream output = null;
+ try {
+ File f = new File("/srv/metrics.torproject.org/ernie/website/"
+ + "consensus-health.html");
+ if (!f.exists()) {
+ response.sendError(HttpServletResponse.SC_NOT_FOUND);
+ return;
+ }
+      response.setContentType(this.getServletContext().getMimeType(
+          f.getName()));
+ response.setHeader("Content-Length", String.valueOf(
+ f.length()));
+ response.setHeader("Content-Disposition",
+ "inline; filename=\"" + f.getName() + "\"");
+ input = new BufferedInputStream(new FileInputStream(f),
+ 1024);
+ output = new BufferedOutputStream(response.getOutputStream(), 1024);
+ byte[] buffer = new byte[1024];
+ int length;
+ while ((length = input.read(buffer)) > 0) {
+ output.write(buffer, 0, length);
+ }
+ } finally {
+ if (output != null) {
+ output.close();
+ }
+ if (input != null) {
+ input.close();
+ }
+ }
+ }
+}
+
diff --git a/src/org/torproject/ernie/status/exonerator/ConsensusServlet.java b/src/org/torproject/ernie/status/exonerator/ConsensusServlet.java
new file mode 100644
index 0000000..f7ed381
--- /dev/null
+++ b/src/org/torproject/ernie/status/exonerator/ConsensusServlet.java
@@ -0,0 +1,124 @@
+/* Copyright 2011, 2012 The Tor Project
+ * See LICENSE for licensing information */
+package org.torproject.ernie.status.exonerator;
+
+import java.io.BufferedOutputStream;
+import java.io.IOException;
+import java.sql.Connection;
+import java.sql.ResultSet;
+import java.sql.SQLException;
+import java.sql.Statement;
+import java.text.ParseException;
+import java.text.SimpleDateFormat;
+import java.util.TimeZone;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+
+import javax.naming.Context;
+import javax.naming.InitialContext;
+import javax.naming.NamingException;
+import javax.servlet.ServletException;
+import javax.servlet.http.HttpServlet;
+import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpServletResponse;
+import javax.sql.DataSource;
+
+public class ConsensusServlet extends HttpServlet {
+
+ private static final long serialVersionUID = 3147332016303032164L;
+
+ private DataSource ds;
+
+ private Logger logger;
+
+ public void init() {
+
+ /* Initialize logger. */
+ this.logger = Logger.getLogger(ConsensusServlet.class.toString());
+
+ /* Look up data source. */
+ try {
+ Context cxt = new InitialContext();
+ this.ds = (DataSource) cxt.lookup("java:comp/env/jdbc/exonerator");
+ this.logger.info("Successfully looked up data source.");
+ } catch (NamingException e) {
+ this.logger.log(Level.WARNING, "Could not look up data source", e);
+ }
+ }
+
+ public void doGet(HttpServletRequest request,
+ HttpServletResponse response) throws IOException,
+ ServletException {
+
+ /* Check valid-after parameter. */
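+    /* Example request (servlet mapping is deployment-specific):
+     * GET /consensus?valid-after=2012-12-14-12-00-00 */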
+ String validAfterParameter = request.getParameter("valid-after");
+ if (validAfterParameter == null ||
+ validAfterParameter.length() != "yyyy-MM-dd-HH-mm-ss".length()) {
+ response.sendError(HttpServletResponse.SC_BAD_REQUEST);
+ return;
+ }
+ SimpleDateFormat parameterFormat = new SimpleDateFormat(
+ "yyyy-MM-dd-HH-mm-ss");
+ parameterFormat.setTimeZone(TimeZone.getTimeZone("UTC"));
+ long parsedTimestamp = -1L;
+ try {
+ parsedTimestamp = parameterFormat.parse(validAfterParameter).
+ getTime();
+ } catch (ParseException e) {
+ response.sendError(HttpServletResponse.SC_BAD_REQUEST);
+ return;
+ }
+ if (parsedTimestamp < 0L) {
+ response.sendError(HttpServletResponse.SC_BAD_REQUEST);
+ return;
+ }
+
+ /* Look up consensus in the database. */
+ SimpleDateFormat databaseFormat = new SimpleDateFormat(
+ "yyyy-MM-dd HH:mm:ss");
+ databaseFormat.setTimeZone(TimeZone.getTimeZone("UTC"));
+ String databaseParameter = databaseFormat.format(parsedTimestamp);
+ byte[] rawDescriptor = null;
+ try {
+ long requestedConnection = System.currentTimeMillis();
+ Connection conn = this.ds.getConnection();
+ Statement statement = conn.createStatement();
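+      /* Embedding the timestamp in the query is safe here, because it
+       * was parsed and reformatted above and cannot contain arbitrary
+       * request input. */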
+ String query = "SELECT rawconsensus FROM consensus "
+ + "WHERE validafter = '" + databaseParameter + "'";
+ ResultSet rs = statement.executeQuery(query);
+ if (rs.next()) {
+ rawDescriptor = rs.getBytes(1);
+ }
+ rs.close();
+ statement.close();
+ conn.close();
+ this.logger.info("Returned a database connection to the pool after "
+ + (System.currentTimeMillis() - requestedConnection)
+ + " millis.");
+ } catch (SQLException e) {
+ response.sendError(HttpServletResponse.SC_INTERNAL_SERVER_ERROR);
+ return;
+ }
+
+ /* Write response. */
+ if (rawDescriptor == null) {
+ response.sendError(HttpServletResponse.SC_NOT_FOUND);
+ return;
+ }
+ try {
+ response.setContentType("text/plain");
+ response.setHeader("Content-Length", String.valueOf(
+ rawDescriptor.length));
+ response.setHeader("Content-Disposition", "inline; filename=\""
+ + validAfterParameter + "-consensus\"");
+ BufferedOutputStream output = new BufferedOutputStream(
+ response.getOutputStream());
+ output.write(rawDescriptor);
+ output.flush();
+ output.close();
+ } finally {
+ /* Nothing to do here. */
+ }
+ }
+}
+
diff --git a/src/org/torproject/ernie/status/exonerator/ExoneraTorDatabaseImporter.java b/src/org/torproject/ernie/status/exonerator/ExoneraTorDatabaseImporter.java
new file mode 100644
index 0000000..d89288f
--- /dev/null
+++ b/src/org/torproject/ernie/status/exonerator/ExoneraTorDatabaseImporter.java
@@ -0,0 +1,619 @@
+/* Copyright 2011, 2012 The Tor Project
+ * See LICENSE for licensing information */
+package org.torproject.ernie.status.exonerator;
+
+import java.io.BufferedInputStream;
+import java.io.BufferedReader;
+import java.io.BufferedWriter;
+import java.io.ByteArrayOutputStream;
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.FileReader;
+import java.io.FileWriter;
+import java.io.IOException;
+import java.io.StringReader;
+import java.io.UnsupportedEncodingException;
+import java.sql.CallableStatement;
+import java.sql.Connection;
+import java.sql.DriverManager;
+import java.sql.SQLException;
+import java.sql.Timestamp;
+import java.sql.Types;
+import java.text.ParseException;
+import java.text.SimpleDateFormat;
+import java.util.Calendar;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.Map;
+import java.util.Set;
+import java.util.Stack;
+import java.util.TimeZone;
+
+import org.apache.commons.codec.binary.Base64;
+import org.apache.commons.codec.binary.Hex;
+import org.apache.commons.codec.digest.DigestUtils;
+
+/* Import Tor descriptors into the ExoneraTor database. */
+public class ExoneraTorDatabaseImporter {
+
+ /* Main function controlling the parsing process. */
+ public static void main(String[] args) {
+ readConfiguration();
+ openDatabaseConnection();
+ prepareDatabaseStatements();
+ createLockFile();
+ readImportHistoryToMemory();
+ parseDescriptors();
+ writeImportHistoryToDisk();
+ closeDatabaseConnection();
+ deleteLockFile();
+ }
+
+ /* JDBC string of the ExoneraTor database. */
+ private static String jdbcString;
+
+ /* Directory from which to import descriptors. */
+ private static String importDirString;
+
+ /* Learn JDBC string and directory to parse descriptors from. */
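+  /* Example config lines (values are illustrative only):
+   *   ExoneraTorDatabaseJdbc jdbc:postgresql://localhost/exonerator
+   *   ExoneraTorImportDirectory import */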
+ private static void readConfiguration() {
+ File configFile = new File("config");
+ if (!configFile.exists()) {
+ System.err.println("Could not find config file. Exiting.");
+ System.exit(1);
+ }
+ String line = null;
+ try {
+ BufferedReader br = new BufferedReader(new FileReader(configFile));
+ while ((line = br.readLine()) != null) {
+ if (line.startsWith("#") || line.length() < 1) {
+ continue;
+ } else if (line.startsWith("ExoneraTorDatabaseJdbc")) {
+ jdbcString = line.split(" ")[1];
+ } else if (line.startsWith("ExoneraTorImportDirectory")) {
+ importDirString = line.split(" ")[1];
+ } else {
+ /* Ignore unrecognized configuration keys. */
+ }
+ }
+ br.close();
+ } catch (IOException e) {
+ System.err.println("Could not parse config file. Exiting.");
+ System.exit(1);
+ }
+ }
+
+ /* Database connection. */
+ private static Connection connection;
+
+ /* Open a database connection using the JDBC string in the config. */
+ private static void openDatabaseConnection() {
+ try {
+ connection = DriverManager.getConnection(jdbcString);
+ } catch (SQLException e) {
+ System.out.println("Could not connect to database. Exiting.");
+ System.exit(1);
+ }
+ }
+
+ /* Callable statements to import data into the database. */
+ private static CallableStatement insertDescriptorStatement;
+ private static CallableStatement insertStatusentryStatement;
+ private static CallableStatement insertConsensusStatement;
+ private static CallableStatement insertExitlistentryStatement;
+
+ /* Prepare statements for importing data into the database. */
+ private static void prepareDatabaseStatements() {
+ try {
+ insertDescriptorStatement = connection.prepareCall(
+ "{call insert_descriptor(?, ?)}");
+ insertStatusentryStatement = connection.prepareCall(
+ "{call insert_statusentry(?, ?, ?, ?, ?, ?, ?)}");
+ insertConsensusStatement = connection.prepareCall(
+ "{call insert_consensus(?, ?)}");
+ insertExitlistentryStatement = connection.prepareCall(
+ "{call insert_exitlistentry(?, ?, ?, ?, ?)}");
+ } catch (SQLException e) {
+ System.out.println("Could not prepare callable statements to "
+ + "import data into the database. Exiting.");
+ System.exit(1);
+ }
+ }
+
+ /* Create a local lock file to prevent other instances of this import
+   * tool from running concurrently. */
+ private static void createLockFile() {
+ File lockFile = new File("exonerator-lock");
+ try {
+ if (lockFile.exists()) {
+ BufferedReader br = new BufferedReader(new FileReader(lockFile));
+ long runStarted = Long.parseLong(br.readLine());
+ br.close();
+ if (System.currentTimeMillis() - runStarted
+ < 6L * 60L * 60L * 1000L) {
+ System.out.println("File 'exonerator-lock' is less than 6 "
+ + "hours old. Exiting.");
+ System.exit(1);
+ } else {
+ System.out.println("File 'exonerator-lock' is at least 6 hours "
+ + "old. Overwriting and executing anyway.");
+ }
+ }
+ BufferedWriter bw = new BufferedWriter(new FileWriter(
+ "exonerator-lock"));
+ bw.append(String.valueOf(System.currentTimeMillis()) + "\n");
+ bw.close();
+ } catch (IOException e) {
+ System.out.println("Could not create 'exonerator-lock' file. "
+ + "Exiting.");
+ System.exit(1);
+ }
+ }
+
+ /* Last and next parse histories containing paths of parsed files and
+ * last modified times. */
+ private static Map<String, Long>
+ lastImportHistory = new HashMap<String, Long>(),
+ nextImportHistory = new HashMap<String, Long>();
+
+ /* Read stats/exonerator-import-history file from disk and remember
+ * locally when files were last parsed. */
+ private static void readImportHistoryToMemory() {
+ File parseHistoryFile = new File("stats",
+ "exonerator-import-history");
+ if (parseHistoryFile.exists()) {
+ try {
+ BufferedReader br = new BufferedReader(new FileReader(
+ parseHistoryFile));
+ String line = null;
+ int lineNumber = 0;
+ while ((line = br.readLine()) != null) {
+ lineNumber++;
+ String[] parts = line.split(",");
+ if (parts.length != 2) {
+ System.out.println("File 'stats/exonerator-import-history' "
+ + "contains a corrupt entry in line " + lineNumber
+ + ". Ignoring parse history file entirely.");
+ lastImportHistory.clear();
+ br.close();
+ return;
+ }
+ long lastModified = Long.parseLong(parts[0]);
+ String filename = parts[1];
+ lastImportHistory.put(filename, lastModified);
+ }
+ br.close();
+ } catch (IOException e) {
+ System.out.println("Could not read import history. Ignoring.");
+ lastImportHistory.clear();
+ }
+ }
+ }
+
+ /* Parse descriptors in the import directory and its subdirectories. */
+ private static void parseDescriptors() {
+ File file = new File(importDirString);
+ if (!file.exists()) {
+ System.out.println("File or directory " + importDirString + " does "
+ + "not exist. Exiting.");
+ return;
+ }
+ Stack<File> files = new Stack<File>();
+ files.add(file);
+ while (!files.isEmpty()) {
+ file = files.pop();
+ if (file.isDirectory()) {
+ for (File f : file.listFiles()) {
+ files.add(f);
+ }
+ } else {
+ parseFile(file);
+ }
+ }
+ }
+
+ /* Import a file if it wasn't imported before, and add it to the import
+ * history for the next execution. */
+ private static void parseFile(File file) {
+ long lastModified = file.lastModified();
+ String filename = file.getName();
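+    /* Note that the history is keyed by file name only, so files with
+     * identical names in different subdirectories share one entry. */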
+ nextImportHistory.put(filename, lastModified);
+ if (!lastImportHistory.containsKey(filename) ||
+ lastImportHistory.get(filename) < lastModified) {
+ try {
+ FileInputStream fis = new FileInputStream(file);
+ BufferedInputStream bis = new BufferedInputStream(fis);
+ ByteArrayOutputStream baos = new ByteArrayOutputStream();
+ int len;
+ byte[] bytes = new byte[1024];
+ while ((len = bis.read(bytes, 0, 1024)) >= 0) {
+ baos.write(bytes, 0, len);
+ }
+ bis.close();
+ byte[] allBytes = baos.toByteArray();
+ splitFile(file, allBytes);
+ } catch (IOException e) {
+ System.out.println("Could not read '" + file + "' to memory. "
+ + "Skipping.");
+ nextImportHistory.remove(filename);
+ }
+ }
+ }
+
+  /* Detect which descriptor type is contained in a file and split it
+   * in order to parse the individual descriptors. */
+ private static void splitFile(File file, byte[] bytes) {
+ try {
+ String asciiString = new String(bytes, "US-ASCII");
+ BufferedReader br = new BufferedReader(new StringReader(
+ asciiString));
+ String line = br.readLine();
+ while (line != null && line.startsWith("@")) {
+ line = br.readLine();
+ }
+ if (line == null) {
+ return;
+ }
+ br.close();
+ String startToken = null;
+ if (line.startsWith("router ")) {
+ startToken = "router ";
+ } else if (line.equals("network-status-version 3")) {
+ startToken = "network-status-version 3";
+ } else if (line.startsWith("Downloaded ") ||
+ line.startsWith("ExitNode ")) {
+ startToken = "ExitNode ";
+ } else {
+ System.out.println("Unknown descriptor type in file '" + file
+ + "'. Ignoring.");
+ return;
+ }
+ String splitToken = "\n" + startToken;
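+ /* Scan for start tokens; each descriptor runs from its start
+ * token through the newline that precedes the next one. */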
+ int length = bytes.length, start = asciiString.indexOf(startToken);
+ while (start < length) {
+ int end = asciiString.indexOf(splitToken, start);
+ if (end < 0) {
+ end = length;
+ } else {
+ end += 1;
+ }
+ byte[] descBytes = new byte[end - start];
+ System.arraycopy(bytes, start, descBytes, 0, end - start);
+ if (startToken.equals("router ")) {
+ parseServerDescriptor(file, descBytes);
+ } else if (startToken.equals("network-status-version 3")) {
+ parseConsensus(file, descBytes);
+ } else if (startToken.equals("ExitNode ")) {
+ parseExitList(file, descBytes);
+ }
+ start = end;
+ }
+ } catch (IOException e) {
+ System.out.println("Could not parse descriptor '" + file + "'. "
+ + "Skipping.");
+ }
+ }
+
+ /* Date format to parse UTC timestamps. */
+ private static SimpleDateFormat parseFormat;
+ static {
+ parseFormat = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
+ parseFormat.setTimeZone(TimeZone.getTimeZone("UTC"));
+ }
+
+ /* Parse a single server descriptor. */
+ private static void parseServerDescriptor(File file, byte[] bytes) {
+ String ascii = "";
+ try {
+ ascii = new String(bytes, "US-ASCII");
+ } catch (UnsupportedEncodingException e) {
+ /* We know that US-ASCII is a supported encoding. */
+ }
+ String startToken = "router ";
+ String sigToken = "\nrouter-signature\n";
+ int start = ascii.indexOf(startToken);
+ int sigIndex = ascii.indexOf(sigToken);
+ String descriptor = null;
+ if (start >= 0 && sigIndex >= 0 && sigIndex > start) {
+ int sig = sigIndex + sigToken.length();
+ byte[] forDigest = new byte[sig - start];
+ System.arraycopy(bytes, start, forDigest, 0, sig - start);
+ descriptor = DigestUtils.shaHex(forDigest);
+ }
+ if (descriptor == null) {
+ System.out.println("Could not calculate descriptor digest. "
+ + "Skipping.");
+ return;
+ }
+ importDescriptor(descriptor, bytes);
+ }
+
+ /* Import a single server descriptor into the database. */
+ private static void importDescriptor(String descriptor,
+ byte[] rawDescriptor) {
+ try {
+ insertDescriptorStatement.clearParameters();
+ insertDescriptorStatement.setString(1, descriptor);
+ insertDescriptorStatement.setBytes(2, rawDescriptor);
+ insertDescriptorStatement.execute();
+ } catch (SQLException e) {
+ System.out.println("Could not import descriptor into the "
+ + "database. Exiting.");
+ System.exit(1);
+ }
+ }
+
+ /* Parse a consensus. */
+ private static void parseConsensus(File file, byte[] bytes) {
+ try {
+ BufferedReader br = new BufferedReader(new StringReader(new String(
+ bytes, "US-ASCII")));
+ String line, fingerprint = null, descriptor = null;
+ Set<String> orAddresses = new HashSet<String>();
+ long validAfterMillis = -1L;
+ StringBuilder rawStatusentryBuilder = null;
+ boolean isRunning = false;
+ while ((line = br.readLine()) != null) {
+ if (line.startsWith("vote-status ") &&
+ !line.equals("vote-status consensus")) {
+ System.out.println("File '" + file + "' contains network status "
+ + "*votes*, not network status *consensuses*. Skipping.");
+ return;
+ } else if (line.startsWith("valid-after ")) {
+ String validAfterTime = line.substring("valid-after ".length());
+ try {
+ validAfterMillis = parseFormat.parse(validAfterTime).
+ getTime();
+ } catch (ParseException e) {
+ System.out.println("Could not parse valid-after timestamp in "
+ + "'" + file + "'. Skipping.");
+ return;
+ }
+ importConsensus(validAfterMillis, bytes);
+ } else if (line.startsWith("r ") ||
+ line.equals("directory-footer")) {
+ if (isRunning) {
+ byte[] rawStatusentry = rawStatusentryBuilder.toString().
+ getBytes();
+ importStatusentry(validAfterMillis, fingerprint, descriptor,
+ orAddresses, rawStatusentry);
+ orAddresses = new HashSet<String>();
+ }
+ if (line.equals("directory-footer")) {
+ return;
+ }
+ rawStatusentryBuilder = new StringBuilder(line + "\n");
+ String[] parts = line.split(" ");
+ if (parts.length < 9) {
+ System.out.println("Could not parse r line '" + line
+ + "'. Skipping.");
+ return;
+ }
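+ /* In an "r" line, parts[2] is the base64-encoded fingerprint
+ * and parts[3] the base64-encoded descriptor digest; re-pad
+ * and hex-encode both for database lookups. parts[6] is the
+ * relay's primary OR address. */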
+ fingerprint = Hex.encodeHexString(Base64.decodeBase64(parts[2]
+ + "=")).toLowerCase();
+ descriptor = Hex.encodeHexString(Base64.decodeBase64(parts[3]
+ + "=")).toLowerCase();
+ orAddresses.add(parts[6]);
+ } else if (line.startsWith("a ")) {
+ rawStatusentryBuilder.append(line + "\n");
+ orAddresses.add(line.substring("a ".length(),
+ line.lastIndexOf(":")));
+ } else if (line.startsWith("s ") || line.equals("s")) {
+ rawStatusentryBuilder.append(line + "\n");
+ isRunning = line.contains(" Running");
+ } else if (rawStatusentryBuilder != null) {
+ rawStatusentryBuilder.append(line + "\n");
+ }
+ }
+ } catch (IOException e) {
+ System.out.println("Could not parse consensus. Skipping.");
+ return;
+ }
+ }
+
+ /* UTC calendar for importing timestamps into the database. */
+ private static Calendar calendarUTC = Calendar.getInstance(
+ TimeZone.getTimeZone("UTC"));
+
+ /* Import a status entry with one or more OR addresses into the
+ * database. */
+ private static void importStatusentry(long validAfterMillis,
+ String fingerprint, String descriptor, Set<String> orAddresses,
+ byte[] rawStatusentry) {
+ try {
+ for (String orAddress : orAddresses) {
+ insertStatusentryStatement.clearParameters();
+ insertStatusentryStatement.setTimestamp(1,
+ new Timestamp(validAfterMillis), calendarUTC);
+ insertStatusentryStatement.setString(2, fingerprint);
+ insertStatusentryStatement.setString(3, descriptor);
+ if (!orAddress.contains(":")) {
+ String[] addressParts = orAddress.split("\\.");
+ byte[] address24Bytes = new byte[3];
+ address24Bytes[0] = (byte) Integer.parseInt(addressParts[0]);
+ address24Bytes[1] = (byte) Integer.parseInt(addressParts[1]);
+ address24Bytes[2] = (byte) Integer.parseInt(addressParts[2]);
+ String orAddress24 = Hex.encodeHexString(address24Bytes);
+ insertStatusentryStatement.setString(4, orAddress24);
+ insertStatusentryStatement.setNull(5, Types.VARCHAR);
+ insertStatusentryStatement.setString(6, orAddress);
+ } else {
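+ /* IPv6: expand the address to 32 hex characters, marking a
+ * "::" gap with "x" and left-padding each group to four
+ * characters; the first 12 characters then identify the
+ * /48 network. */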
+ StringBuilder addressHex = new StringBuilder();
+ int start = orAddress.startsWith("[::") ? 2 : 1;
+ int end = orAddress.length()
+ - (orAddress.endsWith("::]") ? 2 : 1);
+ String[] parts = orAddress.substring(start, end).split(":", -1);
+ for (int i = 0; i < parts.length; i++) {
+ String part = parts[i];
+ if (part.length() == 0) {
+ addressHex.append("x");
+ } else if (part.length() <= 4) {
+ addressHex.append(String.format("%4s", part));
+ } else {
+ addressHex = null;
+ break;
+ }
+ }
+ String orAddress48 = null;
+ if (addressHex != null) {
+ String addressHexString = addressHex.toString();
+ addressHexString = addressHexString.replaceFirst("x",
+ String.format("%" + (33 - addressHexString.length())
+ + "s", "0"));
+ if (!addressHexString.contains("x") &&
+ addressHexString.length() == 32) {
+ orAddress48 = addressHexString.replaceAll(" ", "0").
+ toLowerCase().substring(0, 12);
+ }
+ }
+ if (orAddress48 != null) {
+ insertStatusentryStatement.setNull(4, Types.VARCHAR);
+ insertStatusentryStatement.setString(5, orAddress48);
+ insertStatusentryStatement.setString(6,
+ orAddress.replaceAll("[\\[\\]]", ""));
+ } else {
+ System.err.println("Could not import status entry with IPv6 "
+ + "address '" + orAddress + "'. Exiting.");
+ System.exit(1);
+ }
+ }
+ insertStatusentryStatement.setBytes(7, rawStatusentry);
+ insertStatusentryStatement.execute();
+ }
+ } catch (SQLException e) {
+ System.out.println("Could not import status entry. Exiting.");
+ System.exit(1);
+ }
+ }
+
+ /* Import a consensus into the database. */
+ private static void importConsensus(long validAfterMillis,
+ byte[] rawConsensus) {
+ try {
+ insertConsensusStatement.clearParameters();
+ insertConsensusStatement.setTimestamp(1,
+ new Timestamp(validAfterMillis), calendarUTC);
+ insertConsensusStatement.setBytes(2, rawConsensus);
+ insertConsensusStatement.execute();
+ } catch (SQLException e) {
+ System.out.println("Could not import consensus. Exiting.");
+ System.exit(1);
+ }
+ }
+
+ /* Parse an exit list. */
+ private static void parseExitList(File file, byte[] bytes) {
+ try {
+ BufferedReader br = new BufferedReader(new StringReader(new String(
+ bytes, "US-ASCII")));
+ String fingerprint = null;
+ Set<String> exitAddressLines = new HashSet<String>();
+ StringBuilder rawExitlistentryBuilder = new StringBuilder();
+ while (true) {
+ String line = br.readLine();
+ if ((line == null || line.startsWith("ExitNode ")) &&
+ fingerprint != null) {
+ for (String exitAddressLine : exitAddressLines) {
+ String[] parts = exitAddressLine.split(" ");
+ String exitAddress = parts[1];
+ /* TODO Extend the following code for IPv6 once the exit list
+ * format supports it. */
+ String[] exitAddressParts = exitAddress.split("\\.");
+ byte[] exitAddress24Bytes = new byte[3];
+ exitAddress24Bytes[0] = (byte) Integer.parseInt(
+ exitAddressParts[0]);
+ exitAddress24Bytes[1] = (byte) Integer.parseInt(
+ exitAddressParts[1]);
+ exitAddress24Bytes[2] = (byte) Integer.parseInt(
+ exitAddressParts[2]);
+ String exitAddress24 = Hex.encodeHexString(
+ exitAddress24Bytes);
+ String scannedTime = parts[2] + " " + parts[3];
+ long scannedMillis = -1L;
+ try {
+ scannedMillis = parseFormat.parse(scannedTime).getTime();
+ } catch (ParseException e) {
+ System.out.println("Could not parse timestamp in "
+ + "'" + file + "'. Skipping.");
+ return;
+ }
+ byte[] rawExitlistentry = rawExitlistentryBuilder.toString().
+ getBytes();
+ importExitlistentry(fingerprint, exitAddress24, exitAddress,
+ scannedMillis, rawExitlistentry);
+ }
+ exitAddressLines.clear();
+ rawExitlistentryBuilder = new StringBuilder();
+ }
+ if (line == null) {
+ break;
+ }
+ rawExitlistentryBuilder.append(line + "\n");
+ if (line.startsWith("ExitNode ")) {
+ fingerprint = line.substring("ExitNode ".length()).
+ toLowerCase();
+ } else if (line.startsWith("ExitAddress ")) {
+ exitAddressLines.add(line);
+ }
+ }
+ br.close();
+ } catch (IOException e) {
+ System.out.println("Could not parse exit list. Skipping.");
+ return;
+ }
+ }
+
+ /* Import an exit list entry into the database. */
+ private static void importExitlistentry(String fingerprint,
+ String exitAddress24, String exitAddress, long scannedMillis,
+ byte[] rawExitlistentry) {
+ try {
+ insertExitlistentryStatement.clearParameters();
+ insertExitlistentryStatement.setString(1, fingerprint);
+ insertExitlistentryStatement.setString(2, exitAddress24);
+ insertExitlistentryStatement.setString(3, exitAddress);
+ insertExitlistentryStatement.setTimestamp(4,
+ new Timestamp(scannedMillis), calendarUTC);
+ insertExitlistentryStatement.setBytes(5, rawExitlistentry);
+ insertExitlistentryStatement.execute();
+ } catch (SQLException e) {
+ System.out.println("Could not import exit list entry. Exiting.");
+ System.exit(1);
+ }
+ }
+
+ /* Write parse history from memory to disk for the next execution. */
+ private static void writeImportHistoryToDisk() {
+ File parseHistoryFile = new File("stats/exonerator-import-history");
+ parseHistoryFile.getParentFile().mkdirs();
+ try {
+ BufferedWriter bw = new BufferedWriter(new FileWriter(
+ parseHistoryFile));
+ for (Map.Entry<String, Long> historyEntry :
+ nextImportHistory.entrySet()) {
+ bw.write(String.valueOf(historyEntry.getValue()) + ","
+ + historyEntry.getKey() + "\n");
+ }
+ bw.close();
+ } catch (IOException e) {
+ System.out.println("File 'stats/exonerator-import-history' could "
+ + "not be written. Ignoring.");
+ }
+ }
+
+ /* Close the database connection. */
+ private static void closeDatabaseConnection() {
+ try {
+ connection.close();
+ } catch (SQLException e) {
+ System.out.println("Could not close database connection. "
+ + "Ignoring.");
+ }
+ }
+
+ /* Delete the exonerator-lock file to allow the next executing of this
+ * tool. */
+ private static void deleteLockFile() {
+ new File("exonerator-lock").delete();
+ }
+}
+
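The importer indexes addresses by fixed-width network prefixes: the first
three octets of an IPv4 address become a 6-character hex string (the /24
network), and an IPv6 address is expanded to 32 hex characters of which
the first 12 identify the /48 network. A minimal standalone sketch of that
expansion technique, with illustrative names and not part of the commit:

    import org.apache.commons.codec.binary.Hex;

    public class AddressPrefixSketch {

      /* Hex-encode the /24 network of a dotted-quad IPv4 address,
       * e.g. "86.59.21.38" -> "563b15". */
      static String ipv4To24(String address) {
        String[] parts = address.split("\\.");
        byte[] bytes = new byte[3];
        for (int i = 0; i < 3; i++) {
          bytes[i] = (byte) Integer.parseInt(parts[i]);
        }
        return Hex.encodeHexString(bytes);
      }

      /* Expand an IPv6 address to 32 hex characters and return the
       * first 12 (the /48 network), or null if the address cannot
       * be expanded; mirrors the logic in importStatusentry(). */
      static String ipv6To48(String address) {
        StringBuilder hex = new StringBuilder();
        int start = address.startsWith("::") ? 1 : 0;
        int end = address.length() - (address.endsWith("::") ? 1 : 0);
        for (String part : address.substring(start, end)
            .split(":", -1)) {
          if (part.length() == 0) {
            hex.append("x");            /* Mark the "::" gap. */
          } else if (part.length() <= 4) {
            hex.append(String.format("%4s", part));
          } else {
            return null;                /* Group too long. */
          }
        }
        String s = hex.toString();
        if (s.length() > 32) {
          return null;                  /* Too many groups. */
        }
        /* Replace the gap marker with a "0" padded out to 32 total
         * characters, then turn all padding into zeros. */
        s = s.replaceFirst("x",
            String.format("%" + (33 - s.length()) + "s", "0"));
        if (s.contains("x") || s.length() != 32) {
          return null;                  /* More than one "::". */
        }
        return s.replaceAll(" ", "0").toLowerCase().substring(0, 12);
      }

      public static void main(String[] args) {
        System.out.println(ipv4To24("86.59.21.38"));     /* 563b15 */
        System.out.println(
            ipv6To48("2001:858:2:2:aabb:0:563b:1526"));
                                                   /* 200108580002 */
      }
    }
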
diff --git a/src/org/torproject/ernie/status/exonerator/ExoneraTorServlet.java b/src/org/torproject/ernie/status/exonerator/ExoneraTorServlet.java
new file mode 100644
index 0000000..9d296fc
--- /dev/null
+++ b/src/org/torproject/ernie/status/exonerator/ExoneraTorServlet.java
@@ -0,0 +1,1154 @@
+/* Copyright 2011, 2012 The Tor Project
+ * See LICENSE for licensing information */
+package org.torproject.ernie.status.exonerator;
+
+import java.io.BufferedReader;
+import java.io.IOException;
+import java.io.PrintWriter;
+import java.io.StringReader;
+import java.sql.CallableStatement;
+import java.sql.Connection;
+import java.sql.ResultSet;
+import java.sql.SQLException;
+import java.sql.Statement;
+import java.text.ParseException;
+import java.text.SimpleDateFormat;
+import java.util.ArrayList;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Set;
+import java.util.SortedMap;
+import java.util.SortedSet;
+import java.util.TimeZone;
+import java.util.TreeMap;
+import java.util.TreeSet;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+import javax.naming.Context;
+import javax.naming.InitialContext;
+import javax.naming.NamingException;
+import javax.servlet.ServletException;
+import javax.servlet.http.HttpServlet;
+import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpServletResponse;
+import javax.sql.DataSource;
+
+import org.apache.commons.codec.binary.Hex;
+import org.apache.commons.lang.StringEscapeUtils;
+
+public class ExoneraTorServlet extends HttpServlet {
+
+ private static final long serialVersionUID = 1370088989739567509L;
+
+ private DataSource ds;
+
+ private Logger logger;
+
+ public void init() {
+
+ /* Initialize logger. */
+ this.logger = Logger.getLogger(ExoneraTorServlet.class.toString());
+
+ /* Look up data source. */
+ try {
+ Context cxt = new InitialContext();
+ this.ds = (DataSource) cxt.lookup("java:comp/env/jdbc/exonerator");
+ this.logger.info("Successfully looked up data source.");
+ } catch (NamingException e) {
+ this.logger.log(Level.WARNING, "Could not look up data source", e);
+ }
+ }
+
+ private void writeHeader(PrintWriter out) throws IOException {
+ out.println("<!DOCTYPE HTML PUBLIC \"-//W3C//DTD HTML 4.0 "
+ + "Transitional//EN\">\n"
+ + "<html>\n"
+ + " <head>\n"
+ + " <title>Tor Metrics Portal: ExoneraTor</title>\n"
+ + " <meta http-equiv=\"content-type\" content=\"text/html; "
+ + "charset=ISO-8859-1\">\n"
+ + " <link href=\"/css/stylesheet-ltr.css\" type=\"text/css\" "
+ + "rel=\"stylesheet\">\n"
+ + " <link href=\"/images/favicon.ico\" "
+ + "type=\"image/x-icon\" rel=\"shortcut icon\">\n"
+ + " </head>\n"
+ + " <body>\n"
+ + " <div class=\"center\">\n"
+ + " <table class=\"banner\" border=\"0\" cellpadding=\"0\" "
+ + "cellspacing=\"0\" summary=\"\">\n"
+ + " <tr>\n"
+ + " <td class=\"banner-left\"><a "
+ + "href=\"/index.html\"><img src=\"/images/top-left.png\" "
+ + "alt=\"Click to go to home page\" width=\"193\" "
+ + "height=\"79\"></a></td>\n"
+ + " <td class=\"banner-middle\">\n"
+ + " <a href=\"/\">Home</a>\n"
+ + " <a href=\"graphs.html\">Graphs</a>\n"
+ + " <a href=\"research.html\">Research</a>\n"
+ + " <a href=\"status.html\">Status</a>\n"
+ + " <br>\n"
+ + " <font size=\"2\">\n"
+ + " <a class=\"current\">ExoneraTor</a>\n"
+ + " <a href=\"relay-search.html\">Relay Search</a>\n"
+ + " <a href=\"consensus-health.html\">Consensus "
+ + "Health</a>\n"
+ + " </font>\n"
+ + " </td>\n"
+ + " <td class=\"banner-right\"></td>\n"
+ + " </tr>\n"
+ + " </table>\n"
+ + " <div class=\"main-column\" style=\"margin:5; "
+ + "Padding:0;\">\n"
+ + " <h2>ExoneraTor</h2>\n"
+ + " <h3>or: a website that tells you whether a given IP "
+ + "address was a Tor relay</h3>\n"
+ + " <br>\n"
+ + " <p>Just because you see an Internet connection from a "
+ + "particular IP address does not mean you know <i>who</i> "
+ + "originated the traffic. Tor anonymizes Internet traffic by "
+ + "\"<a href=\"https://www.torproject.org/about/overview"
+ + "#thesolution\">onion routing</a>,\" sending packets "
+ + "through a series of encrypted hops before they reach their "
+ + "destination. Therefore, if you see traffic from a Tor node, "
+ + "you may be seeing traffic that originated from someone "
+ + "using Tor, rather than from the node operator itself. The "
+ + "Tor Project and Tor node operators have no records of the "
+ + "traffic that passes over the network, but we do maintain "
+ + "current and historical records of which IP addresses are "
+ + "part of the Tor network.</p>\n"
+ + " <br>\n"
+ + " <p>ExoneraTor tells you whether there was a Tor relay "
+ + "running on a given IP address at a given time. ExoneraTor "
+ + "can further indicate whether this relay permitted exiting "
+ + "to a given server and/or TCP port. ExoneraTor learns these "
+ + "facts by parsing the public relay lists and relay "
+ + "descriptors that are collected from the Tor directory "
+ + "authorities and the exit lists collected by TorDNSEL. By "
+ + "inputting an IP address and time, you can determine whether "
+ + "that IP was then a part of the Tor network.</p>\n"
+ + " <br>\n"
+ + " <p><font color=\"red\"><b>Notice:</b> Note that the "
+ + "information you are providing below may be visible to "
+ + "anyone who can read the network traffic between you and "
+ + "this web server or who has access to this web "
+ + "server.</font></p>\n"
+ + " <br>\n");
+ }
+
+ private void writeFooter(PrintWriter out) throws IOException {
+ out.println(" <br>\n"
+ + " </div>\n"
+ + " </div>\n"
+ + " <div class=\"bottom\" id=\"bottom\">\n"
+ + " <p>This material is supported in part by the National "
+ + "Science Foundation under Grant No. CNS-0959138. Any "
+ + "opinions, finding, and conclusions or recommendations "
+ + "expressed in this material are those of the author(s) and "
+ + "do not necessarily reflect the views of the National "
+ + "Science Foundation.</p>\n"
+ + " <p>\"Tor\" and the \"Onion Logo\" are <a "
+ + "href=\"https://www.torproject.org/docs/trademark-faq.html.en"
+ + "\">registered trademarks</a> of The Tor Project, Inc.</p>\n"
+ + " <p>Data on this site is freely available under a <a "
+ + "href=\"http://creativecommons.org/publicdomain/zero/1.0/\">"
+ + "CC0 no copyright declaration</a>: To the extent possible "
+ + "under law, the Tor Project has waived all copyright and "
+ + "related or neighboring rights in the data. Graphs are "
+ + "licensed under a <a "
+ + "href=\"http://creativecommons.org/licenses/by/3.0/us/\">"
+ + "Creative Commons Attribution 3.0 United States "
+ + "License</a>.</p>\n"
+ + " </div>\n"
+ + " </body>\n"
+ + "</html>");
+ out.close();
+ }
+
+ public void doGet(HttpServletRequest request,
+ HttpServletResponse response) throws IOException,
+ ServletException {
+
+ /* Start writing response. */
+ PrintWriter out = response.getWriter();
+ writeHeader(out);
+
+ /* Open a database connection that we'll use to handle the whole
+ * request. */
+ Connection conn = null;
+ long requestedConnection = System.currentTimeMillis();
+ try {
+ conn = this.ds.getConnection();
+ } catch (SQLException e) {
+ out.println("<p><font color=\"red\"><b>Warning: </b></font>Unable "
+ + "to connect to the database. If this problem persists, "
+ + "please <a href=\"mailto:tor-assistants@xxxxxxxxxxxxxx\">let "
+ + "us know</a>!</p>\n");
+ writeFooter(out);
+ return;
+ }
+
+ /* Look up first and last consensus in the database. */
+ long firstValidAfter = -1L, lastValidAfter = -1L;
+ try {
+ Statement statement = conn.createStatement();
+ String query = "SELECT MIN(validafter) AS first, "
+ + "MAX(validafter) AS last FROM consensus";
+ ResultSet rs = statement.executeQuery(query);
+ if (rs.next()) {
+ firstValidAfter = rs.getTimestamp(1).getTime();
+ lastValidAfter = rs.getTimestamp(2).getTime();
+ }
+ rs.close();
+ statement.close();
+ } catch (SQLException e) {
+ /* Looks like we don't have any consensuses. */
+ }
+ if (firstValidAfter < 0L || lastValidAfter < 0L) {
+ out.println("<p><font color=\"red\"><b>Warning: </b></font>This "
+ + "server doesn't have any relay lists available. If this "
+ + "problem persists, please "
+ + "<a href=\"mailto:tor-assistants@xxxxxxxxxxxxxx\">let us "
+ + "know</a>!</p>\n");
+ writeFooter(out);
+ try {
+ conn.close();
+ this.logger.info("Returned a database connection to the pool "
+ + "after " + (System.currentTimeMillis()
+ - requestedConnection) + " millis.");
+ } catch (SQLException e) {
+ }
+ return;
+ }
+
+ out.println("<a name=\"relay\"></a><h3>Was there a Tor relay running "
+ + "on this IP address?</h3>");
+
+ /* Parse IP parameter. */
+ Pattern ipv4AddressPattern = Pattern.compile(
+ "^([01]?\\d\\d?|2[0-4]\\d|25[0-5])\\." +
+ "([01]?\\d\\d?|2[0-4]\\d|25[0-5])\\." +
+ "([01]?\\d\\d?|2[0-4]\\d|25[0-5])\\." +
+ "([01]?\\d\\d?|2[0-4]\\d|25[0-5])$");
+ Pattern ipv6AddressPattern = Pattern.compile(
+ "^\\[?[0-9a-fA-F:]{3,39}\\]?$");
+ String ipParameter = request.getParameter("ip");
+ String relayIP = "", ipWarning = "";
+ if (ipParameter != null && ipParameter.length() > 0) {
+ if (ipv4AddressPattern.matcher(ipParameter).matches()) {
+ String[] ipParts = ipParameter.split("\\.");
+ relayIP = Integer.parseInt(ipParts[0]) + "."
+ + Integer.parseInt(ipParts[1]) + "."
+ + Integer.parseInt(ipParts[2]) + "."
+ + Integer.parseInt(ipParts[3]);
+ } else if (ipv6AddressPattern.matcher(ipParameter).matches()) {
+ if (ipParameter.startsWith("[") && ipParameter.endsWith("]")) {
+ ipParameter = ipParameter.substring(1,
+ ipParameter.length() - 1);
+ }
+ StringBuilder addressHex = new StringBuilder();
+ int start = ipParameter.startsWith("::") ? 1 : 0;
+ int end = ipParameter.length()
+ - (ipParameter.endsWith("::") ? 1 : 0);
+ String[] parts = ipParameter.substring(start, end).split(":", -1);
+ for (int i = 0; i < parts.length; i++) {
+ String part = parts[i];
+ if (part.length() == 0) {
+ addressHex.append("x");
+ } else if (part.length() <= 4) {
+ addressHex.append(String.format("%4s", part));
+ } else {
+ addressHex = null;
+ break;
+ }
+ }
+ if (addressHex != null) {
+ String addressHexString = addressHex.toString();
+ addressHexString = addressHexString.replaceFirst("x",
+ String.format("%" + (33 - addressHexString.length()) + "s",
+ "0"));
+ if (!addressHexString.contains("x") &&
+ addressHexString.length() == 32) {
+ relayIP = ipParameter.toLowerCase();
+ }
+ }
+ if (relayIP.length() < 1) {
+ ipWarning = "\"" + (ipParameter.length() > 40 ?
+ StringEscapeUtils.escapeHtml(ipParameter.substring(0, 40))
+ + "[...]" : StringEscapeUtils.escapeHtml(ipParameter))
+ + "\" is not a valid IP address.";
+ }
+ } else {
+ ipWarning = "\"" + (ipParameter.length() > 20 ?
+ StringEscapeUtils.escapeHtml(ipParameter.substring(0, 20))
+ + "[...]" : StringEscapeUtils.escapeHtml(ipParameter))
+ + "\" is not a valid IP address.";
+ }
+ }
+
+ /* Parse timestamp parameter. */
+ String timestampParameter = request.getParameter("timestamp");
+ long timestamp = 0L;
+ boolean timestampIsDate = false;
+ String timestampStr = "", timestampWarning = "";
+ SimpleDateFormat shortDateTimeFormat = new SimpleDateFormat(
+ "yyyy-MM-dd HH:mm");
+ shortDateTimeFormat.setTimeZone(TimeZone.getTimeZone("UTC"));
+ SimpleDateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd");
+ dateFormat.setTimeZone(TimeZone.getTimeZone("UTC"));
+ if (timestampParameter != null && timestampParameter.length() > 0) {
+ try {
+ if (timestampParameter.split(" ").length == 1) {
+ timestamp = dateFormat.parse(timestampParameter).getTime();
+ timestampStr = dateFormat.format(timestamp);
+ timestampIsDate = true;
+ } else {
+ timestamp = shortDateTimeFormat.parse(timestampParameter).
+ getTime();
+ timestampStr = shortDateTimeFormat.format(timestamp);
+ }
+ if (timestamp < firstValidAfter || timestamp > lastValidAfter) {
+ timestampWarning = "Please pick a date or timestamp between \""
+ + shortDateTimeFormat.format(firstValidAfter) + "\" and \""
+ + shortDateTimeFormat.format(lastValidAfter) + "\".";
+ timestamp = 0L;
+ }
+ } catch (ParseException e) {
+ /* We have no way to handle this exception, other than leaving
+ * timestampStr at "". */
+ timestampWarning = "\"" + (timestampParameter.length() > 20 ?
+ StringEscapeUtils.escapeHtml(timestampParameter.
+ substring(0, 20)) + "[...]" :
+ StringEscapeUtils.escapeHtml(timestampParameter))
+ + "\" is not a valid date or timestamp.";
+ }
+ }
+
+ /* If either IP address or timestamp is provided, the other one must
+ * be provided, too. */
+ if (relayIP.length() < 1 && timestampStr.length() > 0 &&
+ ipWarning.length() < 1) {
+ ipWarning = "Please provide an IP address.";
+ }
+ if (relayIP.length() > 0 && timestamp < 1 &&
+ timestampWarning.length() < 1) {
+ timestampWarning = "Please provide a date or timestamp.";
+ }
+
+ /* Parse target IP parameter. */
+ String targetIP = "", targetPort = "", target = "";
+ String[] targetIPParts = null;
+ String targetAddrParameter = request.getParameter("targetaddr");
+ String targetAddrWarning = "";
+ if (targetAddrParameter != null && targetAddrParameter.length() > 0) {
+ Matcher targetAddrParameterMatcher =
+ ipv4AddressPattern.matcher(targetAddrParameter);
+ if (targetAddrParameterMatcher.matches()) {
+ String[] targetAddrParts = targetAddrParameter.split("\\.");
+ targetIP = Integer.parseInt(targetAddrParts[0]) + "."
+ + Integer.parseInt(targetAddrParts[1]) + "."
+ + Integer.parseInt(targetAddrParts[2]) + "."
+ + Integer.parseInt(targetAddrParts[3]);
+ target = targetIP;
+ targetIPParts = targetIP.split("\\.");
+ } else {
+ targetAddrWarning = "\"" + (targetAddrParameter.length() > 20 ?
+ StringEscapeUtils.escapeHtml(targetAddrParameter.substring(
+ 0, 20)) + "[...]" : StringEscapeUtils.escapeHtml(
+ targetAddrParameter)) + "\" is not a valid IP address.";
+ }
+ }
+
+ /* Parse target port parameter. */
+ String targetPortParameter = request.getParameter("targetport");
+ String targetPortWarning = "";
+ if (targetPortParameter != null && targetPortParameter.length() > 0) {
+ Pattern targetPortPattern = Pattern.compile("\\d+");
+ if (targetPortParameter.length() < 5 &&
+ targetPortPattern.matcher(targetPortParameter).matches() &&
+ !targetPortParameter.equals("0") &&
+ Integer.parseInt(targetPortParameter) < 65536) {
+ targetPort = targetPortParameter;
+ if (target.length() > 0) {
+ target += ":" + targetPort;
+ } else {
+ target = targetPort;
+ }
+ } else {
+ targetPortWarning = "\"" + (targetPortParameter.length() > 8 ?
+ StringEscapeUtils.escapeHtml(targetPortParameter.
+ substring(0, 8)) + "[...]" :
+ StringEscapeUtils.escapeHtml(targetPortParameter))
+ + "\" is not a valid TCP port.";
+ }
+ }
+
+ /* If target port is provided, a target address must be provided,
+ * too. */
+ /* TODO Relax this requirement. */
+ if (targetPort.length() > 0 && targetIP.length() < 1 &&
+ targetAddrWarning.length() < 1) {
+ targetAddrWarning = "Please provide an IP address.";
+ }
+
+ /* Write form with IP address and timestamp. */
+ out.println(" <form action=\"#relay\">\n"
+ + " <input type=\"hidden\" name=\"targetaddr\" "
+ + (targetIP.length() > 0 ? " value=\"" + targetIP + "\"" : "")
+ + ">\n"
+ + " <input type=\"hidden\" name=\"targetPort\""
+ + (targetPort.length() > 0 ? " value=\"" + targetPort + "\"" : "")
+ + ">\n"
+ + " <table>\n"
+ + " <tr>\n"
+ + " <td align=\"right\">IP address in question:"
+ + "</td>\n"
+ + " <td><input type=\"text\" name=\"ip\" size=\"30\""
+ + (relayIP.length() > 0 ? " value=\"" + relayIP + "\""
+ : "")
+ + ">"
+ + (ipWarning.length() > 0 ? "<br><font color=\"red\">"
+ + ipWarning + "</font>" : "")
+ + "</td>\n"
+ + " <td><i>(Ex.: 86.59.21.38 or "
+ + "2001:858:2:2:aabb:0:563b:1526)</i></td>\n"
+ + " </tr>\n"
+ + " <tr>\n"
+ + " <td align=\"right\">Date or timestamp, in "
+ + "UTC:</td>\n"
+ + " <td><input type=\"text\" name=\"timestamp\""
+ + " size=\"30\""
+ + (timestampStr.length() > 0 ? " value=\"" + timestampStr + "\""
+ : "")
+ + ">"
+ + (timestampWarning.length() > 0 ? "<br><font color=\"red\">"
+ + timestampWarning + "</font>" : "")
+ + "</td>\n"
+ + " <td><i>(Ex.: 2010-01-01 or 2010-01-01 12:00)"
+ + "</i></td>\n"
+ + " </tr>\n"
+ + " <tr>\n"
+ + " <td></td>\n"
+ + " <td>\n"
+ + " <input type=\"submit\">\n"
+ + " <input type=\"reset\">\n"
+ + " </td>\n"
+ + " <td></td>\n"
+ + " </tr>\n"
+ + " </table>\n"
+ + " </form>\n");
+
+ if (relayIP.length() < 1 || timestamp < 1) {
+ writeFooter(out);
+ try {
+ conn.close();
+ this.logger.info("Returned a database connection to the pool "
+ + "after " + (System.currentTimeMillis()
+ - requestedConnection) + " millis.");
+ } catch (SQLException e) {
+ }
+ return;
+ }
+
+ out.printf("<p>Looking up IP address %s in the relay lists "
+ + "published ", relayIP);
+ long timestampFrom, timestampTo;
+ if (timestampIsDate) {
+ /* If we only have a date, consider all consensuses published on
+ * the given date (through 23:59:59), plus those published up to
+ * 3 hours before the date began. */
+ timestampFrom = timestamp - 3L * 60L * 60L * 1000L;
+ timestampTo = timestamp + (24L * 60L * 60L - 1L) * 1000L;
+ out.printf("on %s", timestampStr);
+ } else {
+ /* If we have an exact timestamp, consider the consensuses published
+ * in the 3 hours preceding the UTC timestamp. */
+ timestampFrom = timestamp - 3L * 60L * 60L * 1000L;
+ timestampTo = timestamp;
+ out.printf("between %s and %s UTC",
+ shortDateTimeFormat.format(timestampFrom),
+ shortDateTimeFormat.format(timestampTo));
+ }
+ /* If we don't find any relays in the given time interval, also look
+ * at consensuses published 12 hours before and 12 hours after the
+ * interval, in case the user got the "UTC" bit wrong. */
+ long timestampTooOld = timestampFrom - 12L * 60L * 60L * 1000L;
+ long timestampTooNew = timestampTo + 12L * 60L * 60L * 1000L;
+ out.print(" as well as in the relevant exit lists. Clients could "
+ + "have selected any of these relays to build circuits. "
+ + "You may follow the links to relay lists and relay descriptors "
+ + "to grep for the lines printed below and confirm that results "
+ + "are correct.<br>");
+ SimpleDateFormat validAfterTimeFormat = new SimpleDateFormat(
+ "yyyy-MM-dd HH:mm:ss");
+ validAfterTimeFormat.setTimeZone(TimeZone.getTimeZone("UTC"));
+ String fromValidAfter = validAfterTimeFormat.format(timestampTooOld);
+ String toValidAfter = validAfterTimeFormat.format(timestampTooNew);
+ SortedSet<Long> tooOldConsensuses = new TreeSet<Long>();
+ SortedSet<Long> relevantConsensuses = new TreeSet<Long>();
+ SortedSet<Long> tooNewConsensuses = new TreeSet<Long>();
+ try {
+ Statement statement = conn.createStatement();
+ String query = "SELECT validafter FROM consensus "
+ + "WHERE validafter >= '" + fromValidAfter
+ + "' AND validafter <= '" + toValidAfter + "'";
+ ResultSet rs = statement.executeQuery(query);
+ while (rs.next()) {
+ long consensusTime = rs.getTimestamp(1).getTime();
+ if (consensusTime < timestampFrom) {
+ tooOldConsensuses.add(consensusTime);
+ } else if (consensusTime > timestampTo) {
+ tooNewConsensuses.add(consensusTime);
+ } else {
+ relevantConsensuses.add(consensusTime);
+ }
+ }
+ rs.close();
+ statement.close();
+ } catch (SQLException e) {
+ /* Looks like we don't have any consensuses in the requested
+ * interval. */
+ }
+ SortedSet<Long> allConsensuses = new TreeSet<Long>();
+ allConsensuses.addAll(tooOldConsensuses);
+ allConsensuses.addAll(relevantConsensuses);
+ allConsensuses.addAll(tooNewConsensuses);
+ if (allConsensuses.isEmpty()) {
+ out.println(" <p>No relay lists found!</p>\n"
+ + " <p>Result is INDECISIVE!</p>\n"
+ + " <p>We cannot make any statement whether there was "
+ + "a Tor relay running on IP address " + relayIP
+ + (timestampIsDate ? " on " : " at ") + timestampStr + "! We "
+ + "did not find any relevant relay lists at the given time. If "
+ + "you think this is an error on our side, please "
+ + "<a href=\"mailto:tor-assistants@xxxxxxxxxxxxxx\">contact "
+ + "us</a>!</p>\n");
+ writeFooter(out);
+ try {
+ conn.close();
+ this.logger.info("Returned a database connection to the pool "
+ + "after " + (System.currentTimeMillis()
+ - requestedConnection) + " millis.");
+ } catch (SQLException e) {
+ }
+ return;
+ }
+
+ /* Search for status entries with the given IP address as onion
+ * routing address, plus status entries of relays having an exit list
+ * entry with the given IP address as exit address. */
+ SortedMap<Long, SortedMap<String, String>> statusEntries =
+ new TreeMap<Long, SortedMap<String, String>>();
+ SortedSet<Long> positiveConsensusesNoTarget = new TreeSet<Long>();
+ SortedMap<String, Set<Long>> relevantDescriptors =
+ new TreeMap<String, Set<Long>>();
+ try {
+ CallableStatement cs = conn.prepareCall(
+ "{call search_statusentries_by_address_date(?, ?)}");
+ cs.setString(1, relayIP);
+ cs.setDate(2, new java.sql.Date(timestamp));
+ ResultSet rs = cs.executeQuery();
+ while (rs.next()) {
+ byte[] rawstatusentry = rs.getBytes(1);
+ String descriptor = rs.getString(2);
+ long validafter = rs.getTimestamp(3).getTime();
+ positiveConsensusesNoTarget.add(validafter);
+ if (!relevantDescriptors.containsKey(descriptor)) {
+ relevantDescriptors.put(descriptor, new HashSet<Long>());
+ }
+ relevantDescriptors.get(descriptor).add(validafter);
+ String fingerprint = rs.getString(4);
+ String exitaddress = rs.getString(6);
+ StringBuilder html = new StringBuilder();
+ for (String line : new String(rawstatusentry).split("\n")) {
+ if (line.startsWith("r ")) {
+ String[] parts = line.split(" ");
+ boolean orAddressMatches = parts[6].equals(relayIP);
+ html.append("r " + parts[1] + " " + parts[2] + " "
+ + "<a href=\"serverdesc?desc-id=" + descriptor + "\" "
+ + "target=\"_blank\">" + parts[3] + "</a> " + parts[4]
+ + " " + parts[5] + " " + (orAddressMatches ? "<b>" : "")
+ + parts[6] + (orAddressMatches ? "</b>" : "") + " "
+ + parts[7] + " " + parts[8] + "\n");
+ } else if (line.startsWith("a ") &&
+ line.toLowerCase().contains(relayIP)) {
+ String address = line.substring("a ".length(),
+ line.lastIndexOf(":"));
+ String port = line.substring(line.lastIndexOf(":"));
+ html.append("a <b>" + address + "</b>" + port + "\n");
+ }
+ }
+ if (exitaddress != null && exitaddress.length() > 0) {
+ long scanned = rs.getTimestamp(7).getTime();
+ html.append(" [ExitAddress <b>" + exitaddress
+ + "</b> " + validAfterTimeFormat.format(scanned) + "]\n");
+ }
+ if (!statusEntries.containsKey(validafter)) {
+ statusEntries.put(validafter, new TreeMap<String, String>());
+ }
+ statusEntries.get(validafter).put(fingerprint, html.toString());
+ }
+ rs.close();
+ cs.close();
+ } catch (SQLException e) {
+ /* Nothing found. */
+ }
+
+ /* Print out what we found. */
+ SimpleDateFormat validAfterUrlFormat = new SimpleDateFormat(
+ "yyyy-MM-dd-HH-mm-ss");
+ validAfterUrlFormat.setTimeZone(TimeZone.getTimeZone("UTC"));
+ out.print("<pre><code>");
+ for (long consensus : allConsensuses) {
+ if (relevantConsensuses.contains(consensus)) {
+ String validAfterDatetime = validAfterTimeFormat.format(
+ consensus);
+ String validAfterString = validAfterUrlFormat.format(consensus);
+ out.print("valid-after <b>"
+ + "<a href=\"consensus?valid-after="
+ + validAfterString + "\" target=\"_blank\">"
+ + validAfterDatetime + "</b></a>\n");
+ if (statusEntries.containsKey(consensus)) {
+ for (String htmlString :
+ statusEntries.get(consensus).values()) {
+ out.print(htmlString);
+ }
+ }
+ out.print("\n");
+ }
+ }
+ out.print("</code></pre>");
+ if (relevantDescriptors.isEmpty()) {
+ out.printf(" <p>None found!</p>\n"
+ + " <p>Result is NEGATIVE with high certainty!</p>\n"
+ + " <p>We did not find IP "
+ + "address " + relayIP + " in any of the relay or exit lists "
+ + "that were published between %s and %s.</p>\n",
+ dateFormat.format(timestampTooOld),
+ dateFormat.format(timestampTooNew));
+ /* Run another query to find out if there are relays running on
+ * other IP addresses in the same /24 or /48 network and tell the
+ * user about it. */
+ List<String> addressesInSameNetwork = new ArrayList<String>();
+ if (!relayIP.contains(":")) {
+ String[] relayIPParts = relayIP.split("\\.");
+ byte[] address24Bytes = new byte[3];
+ address24Bytes[0] = (byte) Integer.parseInt(relayIPParts[0]);
+ address24Bytes[1] = (byte) Integer.parseInt(relayIPParts[1]);
+ address24Bytes[2] = (byte) Integer.parseInt(relayIPParts[2]);
+ String address24 = Hex.encodeHexString(address24Bytes);
+ try {
+ CallableStatement cs = conn.prepareCall(
+ "{call search_addresses_in_same_24 (?, ?)}");
+ cs.setString(1, address24);
+ cs.setDate(2, new java.sql.Date(timestamp));
+ ResultSet rs = cs.executeQuery();
+ while (rs.next()) {
+ String address = rs.getString(1);
+ if (!addressesInSameNetwork.contains(address)) {
+ addressesInSameNetwork.add(address);
+ }
+ }
+ rs.close();
+ cs.close();
+ } catch (SQLException e) {
+ /* No other addresses in the same /24 found. */
+ }
+ } else {
+ StringBuilder addressHex = new StringBuilder();
+ int start = relayIP.startsWith("::") ? 1 : 0;
+ int end = relayIP.length() - (relayIP.endsWith("::") ? 1 : 0);
+ String[] parts = relayIP.substring(start, end).split(":", -1);
+ for (int i = 0; i < parts.length; i++) {
+ String part = parts[i];
+ if (part.length() == 0) {
+ addressHex.append("x");
+ } else if (part.length() <= 4) {
+ addressHex.append(String.format("%4s", part));
+ } else {
+ addressHex = null;
+ break;
+ }
+ }
+ String address48 = null;
+ if (addressHex != null) {
+ String addressHexString = addressHex.toString();
+ addressHexString = addressHexString.replaceFirst("x",
+ String.format("%" + (33 - addressHexString.length())
+ + "s", "0"));
+ if (!addressHexString.contains("x") &&
+ addressHexString.length() == 32) {
+ address48 = addressHexString.replaceAll(" ", "0").
+ toLowerCase().substring(0, 12);
+ }
+ }
+ if (address48 != null) {
+ try {
+ CallableStatement cs = conn.prepareCall(
+ "{call search_addresses_in_same_48 (?, ?)}");
+ cs.setString(1, address48);
+ cs.setDate(2, new java.sql.Date(timestamp));
+ ResultSet rs = cs.executeQuery();
+ while (rs.next()) {
+ String address = rs.getString(1);
+ if (!addressesInSameNetwork.contains(address)) {
+ addressesInSameNetwork.add(address);
+ }
+ }
+ rs.close();
+ cs.close();
+ } catch (SQLException e) {
+ /* No other addresses in the same /48 found. */
+ }
+ }
+ }
+ if (!addressesInSameNetwork.isEmpty()) {
+ if (!relayIP.contains(":")) {
+ out.print(" <p>The following other IP addresses of Tor "
+ + "relays in the same /24 network were found in relay "
+ + "and/or exit lists around the time that could be related "
+ + "to IP address " + relayIP + ":</p>\n");
+ } else {
+ out.print(" <p>The following other IP addresses of Tor "
+ + "relays in the same /48 network were found in relay "
+ + "lists around the time that could be related to IP "
+ + "address " + relayIP + ":</p>\n");
+ }
+ out.print(" <ul>\n");
+ for (String s : addressesInSameNetwork) {
+ out.print(" <li>" + s + "</li>\n");
+ }
+ out.print(" </ul>\n");
+ }
+ writeFooter(out);
+ try {
+ conn.close();
+ this.logger.info("Returned a database connection to the pool "
+ + "after " + (System.currentTimeMillis()
+ - requestedConnection) + " millis.");
+ } catch (SQLException e) {
+ }
+ return;
+ }
+
+ /* Print out result. */
+ boolean inMostRelevantConsensuses = false,
+ inOtherRelevantConsensus = false,
+ inTooOldConsensuses = false,
+ inTooNewConsensuses = false;
+ for (long match : positiveConsensusesNoTarget) {
+ if (timestampIsDate &&
+ dateFormat.format(match).equals(timestampStr)) {
+ inMostRelevantConsensuses = true;
+ } else if (!timestampIsDate &&
+ match == relevantConsensuses.last()) {
+ inMostRelevantConsensuses = true;
+ } else if (relevantConsensuses.contains(match)) {
+ inOtherRelevantConsensus = true;
+ } else if (tooOldConsensuses.contains(match)) {
+ inTooOldConsensuses = true;
+ } else if (tooNewConsensuses.contains(match)) {
+ inTooNewConsensuses = true;
+ }
+ }
+ if (inMostRelevantConsensuses) {
+ out.print(" <p>Result is POSITIVE with high certainty!"
+ + "</p>\n"
+ + " <p>We found one or more relays on IP address "
+ + relayIP + " in ");
+ if (timestampIsDate) {
+ out.print("relay list published on " + timestampStr);
+ } else {
+ out.print("the most recent relay list preceding " + timestampStr);
+ }
+ out.print(" that clients were likely to know.</p>\n");
+ } else {
+ if (inOtherRelevantConsensus) {
+ out.println(" <p>Result is POSITIVE "
+ + "with moderate certainty!</p>\n");
+ out.println("<p>We found one or more relays on IP address "
+ + relayIP + ", but not in ");
+ if (timestampIsDate) {
+ out.print("a relay list published on " + timestampStr);
+ } else {
+ out.print("the most recent relay list preceding "
+ + timestampStr);
+ }
+ out.print(". A possible reason for the relay being missing in a "
+ + "relay list might be that some of the directory "
+ + "authorities had difficulties connecting to the relay. "
+ + "However, clients might still have used the relay.</p>\n");
+ } else {
+ out.println(" <p>Result is NEGATIVE "
+ + "with high certainty!</p>\n");
+ out.println(" <p>We did not find any relay on IP address "
+ + relayIP
+ + " in the relay lists 3 hours preceding " + timestampStr
+ + ".</p>\n");
+ if (inTooOldConsensuses || inTooNewConsensuses) {
+ if (inTooOldConsensuses && !inTooNewConsensuses) {
+ out.println(" <p>Note that we found a matching relay "
+ + "in relay lists that were published between 15 and 3 "
+ + "hours before " + timestampStr + ".</p>\n");
+ } else if (!inTooOldConsensuses && inTooNewConsensuses) {
+ out.println(" <p>Note that we found a matching relay "
+ + "in relay lists that were published up to 12 hours "
+ + "after " + timestampStr + ".</p>\n");
+ } else {
+ out.println(" <p>Note that we found a matching relay "
+ + "in relay lists that were published between 15 and 3 "
+ + "hours before and in relay lists that were published "
+ + "up to 12 hours after " + timestampStr + ".</p>\n");
+ }
+ if (timestampIsDate) {
+ out.println("<p>Be sure to try out the previous/next day or "
+ + "provide an exact timestamp in UTC.</p>");
+ } else {
+ out.println("<p>Make sure that the timestamp you "
+ + "provided is correctly converted to the UTC "
+ + "timezone.</p>");
+ }
+ }
+ /* We didn't find any descriptor. No need to look up targets. */
+ writeFooter(out);
+ try {
+ conn.close();
+ this.logger.info("Returned a database connection to the pool "
+ + "after " + (System.currentTimeMillis()
+ - requestedConnection) + " millis.");
+ } catch (SQLException e) {
+ }
+ return;
+ }
+ }
+
+ /* Looking up targets for IPv6 is not supported yet. */
+ if (relayIP.contains(":")) {
+ writeFooter(out);
+ return;
+ }
+
+ /* Second part: target */
+ out.println("<br><a name=\"exit\"></a><h3>Was this relay configured "
+ + "to permit exiting to a given target?</h3>");
+
+ out.println(" <form action=\"#exit\">\n"
+ + " <input type=\"hidden\" name=\"timestamp\"\n"
+ + " value=\"" + timestampStr + "\">\n"
+ + " <input type=\"hidden\" name=\"ip\" "
+ + "value=\"" + relayIP + "\">\n"
+ + " <table>\n"
+ + " <tr>\n"
+ + " <td align=\"right\">Target address:</td>\n"
+ + " <td><input type=\"text\" name=\"targetaddr\""
+ + (targetIP.length() > 0 ? " value=\"" + targetIP + "\"" : "")
+ + "\">"
+ + (targetAddrWarning.length() > 0 ? "<br><font color=\"red\">"
+ + targetAddrWarning + "</font>" : "")
+ + "</td>\n"
+ + " <td><i>(Ex.: 4.3.2.1)</i></td>\n"
+ + " </tr>\n"
+ + " <tr>\n"
+ + " <td align=\"right\">Target port:</td>\n"
+ + " <td><input type=\"text\" name=\"targetport\""
+ + (targetPort.length() > 0 ? " value=\"" + targetPort + "\""
+ : "")
+ + ">"
+ + (targetPortWarning.length() > 0 ? "<br><font color=\"red\">"
+ + targetPortWarning + "</font>" : "")
+ + "</td>\n"
+ + " <td><i>(Ex.: 80)</i></td>\n"
+ + " </tr>\n"
+ + " <tr>\n"
+ + " <td></td>\n"
+ + " <td>\n"
+ + " <input type=\"submit\">\n"
+ + " <input type=\"reset\">\n"
+ + " </td>\n"
+ + " <td></td>\n"
+ + " </tr>\n"
+ + " </table>\n"
+ + " </form>\n");
+
+ if (targetIP.length() < 1) {
+ writeFooter(out);
+ try {
+ conn.close();
+ this.logger.info("Returned a database connection to the pool "
+ + "after " + (System.currentTimeMillis()
+ - requestedConnection) + " millis.");
+ } catch (SQLException e) {
+ }
+ return;
+ }
+
+ /* Parse router descriptors to check exit policies. */
+ out.println("<p>Searching the relay descriptors published by the "
+ + "relay on IP address " + relayIP + " to find out whether this "
+ + "relay permitted exiting to " + target + ". You may follow the "
+ + "links above to the relay descriptors and grep them for the "
+ + "lines printed below to confirm that results are correct.</p>");
+ SortedSet<Long> positiveConsensuses = new TreeSet<Long>();
+ Set<String> missingDescriptors = new HashSet<String>();
+ Set<String> descriptors = relevantDescriptors.keySet();
+ for (String descriptor : descriptors) {
+ byte[] rawDescriptor = null;
+ /* Assume the descriptor is missing until its bytes arrive;
+ * removed again below once the lookup succeeds. */
+ missingDescriptors.add(descriptor);
+ try {
+ String query = "SELECT rawdescriptor FROM descriptor "
+ + "WHERE descriptor = '" + descriptor + "'";
+ Statement statement = conn.createStatement();
+ ResultSet rs = statement.executeQuery(query);
+ if (rs.next()) {
+ rawDescriptor = rs.getBytes(1);
+ }
+ rs.close();
+ statement.close();
+ } catch (SQLException e) {
+ /* Consider this descriptor as 'missing'. */
+ continue;
+ }
+ if (rawDescriptor != null && rawDescriptor.length > 0) {
+ missingDescriptors.remove(descriptor);
+ String rawDescriptorString = new String(rawDescriptor,
+ "US-ASCII");
+ try {
+ BufferedReader br = new BufferedReader(
+ new StringReader(rawDescriptorString));
+ String line = null, routerLine = null, publishedLine = null;
+ StringBuilder acceptRejectLines = new StringBuilder();
+ boolean foundMatch = false;
+ while ((line = br.readLine()) != null) {
+ if (line.startsWith("router ")) {
+ routerLine = line;
+ } else if (line.startsWith("published ")) {
+ publishedLine = line;
+ } else if (line.startsWith("reject ") ||
+ line.startsWith("accept ")) {
+ if (foundMatch) {
+ out.println(line);
+ continue;
+ }
+ boolean ruleAccept = line.split(" ")[0].equals("accept");
+ String ruleAddress = line.split(" ")[1].split(":")[0];
+ if (!ruleAddress.equals("*")) {
+ if (!ruleAddress.contains("/") &&
+ !ruleAddress.equals(targetIP)) {
+ /* IP address does not match. */
+ acceptRejectLines.append(line + "\n");
+ continue;
+ }
+ String[] ruleIPParts = ruleAddress.split("/")[0].
+ split("\\.");
+ int ruleNetwork = ruleAddress.contains("/") ?
+ Integer.parseInt(ruleAddress.split("/")[1]) : 32;
+ for (int i = 0; i < 4; i++) {
+ if (ruleNetwork == 0) {
+ break;
+ } else if (ruleNetwork >= 8) {
+ if (ruleIPParts[i].equals(targetIPParts[i])) {
+ ruleNetwork -= 8;
+ } else {
+ break;
+ }
+ } else {
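+ /* Fewer than eight prefix bits remain: compare only the top
+ * ruleNetwork bits of this octet. */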
+ int mask = 255 ^ 255 >>> ruleNetwork;
+ if ((Integer.parseInt(ruleIPParts[i]) & mask) ==
+ (Integer.parseInt(targetIPParts[i]) & mask)) {
+ ruleNetwork = 0;
+ }
+ break;
+ }
+ }
+ if (ruleNetwork > 0) {
+ /* IP address does not match. */
+ acceptRejectLines.append(line + "\n");
+ continue;
+ }
+ }
+ String rulePort = line.split(" ")[1].split(":")[1];
+ if (targetPort.length() < 1 && !ruleAccept &&
+ !rulePort.equals("*")) {
+ /* With no port given, we only consider reject :* rules as
+ * matching. */
+ acceptRejectLines.append(line + "\n");
+ continue;
+ }
+ if (targetPort.length() > 0 && !rulePort.equals("*") &&
+ rulePort.contains("-")) {
+ int fromPort = Integer.parseInt(rulePort.split("-")[0]);
+ int toPort = Integer.parseInt(rulePort.split("-")[1]);
+ int targetPortInt = Integer.parseInt(targetPort);
+ if (targetPortInt < fromPort ||
+ targetPortInt > toPort) {
+ /* Port not contained in interval; keep the line for
+ * context like the other non-matching branches. */
+ acceptRejectLines.append(line + "\n");
+ continue;
+ }
+ }
+ if (targetPort.length() > 0) {
+ if (!rulePort.equals("*") &&
+ !rulePort.contains("-") &&
+ !targetPort.equals(rulePort)) {
+ /* Ports do not match. */
+ acceptRejectLines.append(line + "\n");
+ continue;
+ }
+ }
+ boolean relevantMatch = false;
+ for (long match : relevantDescriptors.get(descriptor)) {
+ if (relevantConsensuses.contains(match)) {
+ relevantMatch = true;
+ }
+ }
+ if (relevantMatch) {
+ String[] routerParts = routerLine.split(" ");
+ out.println("<pre><code>" + routerParts[0] + " "
+ + routerParts[1] + " <b>" + routerParts[2] + "</b> "
+ + routerParts[3] + " " + routerParts[4] + " "
+ + routerParts[5]);
+ String[] publishedParts = publishedLine.split(" ");
+ out.println(publishedParts[0] + " <b>"
+ + publishedParts[1] + " " + publishedParts[2]
+ + "</b>");
+ out.print(acceptRejectLines.toString());
+ out.println("<b>" + line + "</b>");
+ foundMatch = true;
+ }
+ if (ruleAccept) {
+ positiveConsensuses.addAll(
+ relevantDescriptors.get(descriptor));
+ }
+ }
+ }
+ br.close();
+ if (foundMatch) {
+ out.println("</code></pre>");
+ }
+ } catch (IOException e) {
+ /* Could not read descriptor string. */
+ continue;
+ }
+ }
+ }
+
+ /* Print out result. */
+ inMostRelevantConsensuses = false;
+ inOtherRelevantConsensus = false;
+ inTooOldConsensuses = false;
+ inTooNewConsensuses = false;
+ for (long match : positiveConsensuses) {
+ if (timestampIsDate &&
+ dateFormat.format(match).equals(timestampStr)) {
+ inMostRelevantConsensuses = true;
+ } else if (!timestampIsDate && match == relevantConsensuses.last()) {
+ inMostRelevantConsensuses = true;
+ } else if (relevantConsensuses.contains(match)) {
+ inOtherRelevantConsensus = true;
+ } else if (tooOldConsensuses.contains(match)) {
+ inTooOldConsensuses = true;
+ } else if (tooNewConsensuses.contains(match)) {
+ inTooNewConsensuses = true;
+ }
+ }
+ if (inMostRelevantConsensuses) {
+ out.print(" <p>Result is POSITIVE with high certainty!"
+ + "</p>\n"
+ + " <p>We found one or more relays on IP address "
+ + relayIP + " permitting exit to " + target + " in ");
+ if (timestampIsDate) {
+ out.print("relay list published on " + timestampStr);
+ } else {
+ out.print("the most recent relay list preceding " + timestampStr);
+ }
+ out.print(" that clients were likely to know.</p>\n");
+ writeFooter(out);
+ try {
+ conn.close();
+ this.logger.info("Returned a database connection to the pool "
+ + "after " + (System.currentTimeMillis()
+ - requestedConnection) + " millis.");
+ } catch (SQLException e) {
+ }
+ return;
+ }
+ boolean resultIndecisive = target.length() > 0
+ && !missingDescriptors.isEmpty();
+ if (resultIndecisive) {
+ out.println(" <p>Result is INDECISIVE!</p>\n"
+ + " <p>At least one referenced descriptor could not be "
+ + "found. This is a rare case, but one that (apparently) "
+ + "happens. We cannot make any good statement about exit "
+ + "relays without these descriptors. The following descriptors "
+ + "are missing:</p>");
+ for (String desc : missingDescriptors) {
+ out.println(" <p>" + desc + "</p>\n");
+ }
+ }
+ if (inOtherRelevantConsensus) {
+ if (!resultIndecisive) {
+ out.println(" <p>Result is POSITIVE "
+ + "with moderate certainty!</p>\n");
+ }
+ out.println("<p>We found one or more relays on IP address "
+ + relayIP + " permitting exit to " + target + ", but not in ");
+ if (timestampIsDate) {
+ out.print("a relay list published on " + timestampStr);
+ } else {
+ out.print("the most recent relay list preceding " + timestampStr);
+ }
+ out.print(". A possible reason for the relay being missing in a "
+ + "relay list might be that some of the directory authorities "
+ + "had difficulties connecting to the relay. However, clients "
+ + "might still have used the relay.</p>\n");
+ } else {
+ if (!resultIndecisive) {
+ out.println(" <p>Result is NEGATIVE "
+ + "with high certainty!</p>\n");
+ }
+ out.println(" <p>We did not find any relay on IP address "
+ + relayIP + " permitting exit to " + target
+ + " in the relay list 3 hours preceding " + timestampStr
+ + ".</p>\n");
+ if (inTooOldConsensuses || inTooNewConsensuses) {
+ if (inTooOldConsensuses && !inTooNewConsensuses) {
+ out.println(" <p>Note that we found a matching relay in "
+ + "relay lists that were published between 15 and 3 "
+ + "hours before " + timestampStr + ".</p>\n");
+ } else if (!inTooOldConsensuses && inTooNewConsensuses) {
+ out.println(" <p>Note that we found a matching relay in "
+ + "relay lists that were published up to 12 hours after "
+ + timestampStr + ".</p>\n");
+ } else {
+ out.println(" <p>Note that we found a matching relay in "
+ + "relay lists that were published between 15 and 3 "
+ + "hours before and in relay lists that were published up "
+ + "to 12 hours after " + timestampStr + ".</p>\n");
+ }
+ if (timestampIsDate) {
+ out.println("<p>Be sure to try out the previous/next day or "
+ + "provide an exact timestamp in UTC.</p>");
+ } else {
+ out.println("<p>Make sure that the timestamp you provided is "
+ + "correctly converted to the UTC timezone.</p>");
+ }
+ }
+ }
+ if (target.length() > 0) {
+ if (positiveConsensuses.isEmpty() &&
+ !positiveConsensusesNoTarget.isEmpty()) {
+ out.println(" <p>Note that although the found relay(s) did "
+ + "not permit exiting to " + target + ", there have been one "
+ + "or more relays running at the given time.</p>");
+ }
+ }
+ try {
+ conn.close();
+ this.logger.info("Returned a database connection to the pool "
+ + "after " + (System.currentTimeMillis()
+ - requestedConnection) + " millis.");
+ } catch (SQLException e) {
+ }
+ writeFooter(out);
+ }
+}
+
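The exit-policy check above walks a descriptor's accept/reject lines in
order, comparing the target address against an optional /prefix and the
target port against a single port, a range, or "*"; the first matching
rule decides. A compact standalone sketch of the same first-match idea,
with illustrative names and not part of the commit:

    public class ExitPolicySketch {

      /* Decide whether a rule like "accept 4.3.0.0/16:80-88" or
       * "reject *:*" applies to an IPv4 target address and port. */
      static boolean ruleApplies(String rule, String targetIp,
          int targetPort) {
        String[] addrPort = rule.split(" ")[1].split(":");
        String ruleAddr = addrPort[0], rulePort = addrPort[1];
        if (!ruleAddr.equals("*")) {
          int prefixBits = ruleAddr.contains("/")
              ? Integer.parseInt(ruleAddr.split("/")[1]) : 32;
          long mask = prefixBits == 0 ? 0L
              : (0xffffffffL << (32 - prefixBits)) & 0xffffffffL;
          if ((toLong(ruleAddr.split("/")[0]) & mask)
              != (toLong(targetIp) & mask)) {
            return false;               /* Address outside prefix. */
          }
        }
        if (!rulePort.equals("*")) {
          if (rulePort.contains("-")) {
            int from = Integer.parseInt(rulePort.split("-")[0]);
            int to = Integer.parseInt(rulePort.split("-")[1]);
            if (targetPort < from || targetPort > to) {
              return false;             /* Port outside range. */
            }
          } else if (targetPort != Integer.parseInt(rulePort)) {
            return false;               /* Ports differ. */
          }
        }
        return true;
      }

      /* Pack a dotted quad into the low 32 bits of a long. */
      static long toLong(String dottedQuad) {
        long result = 0L;
        for (String octet : dottedQuad.split("\\.")) {
          result = (result << 8) | Integer.parseInt(octet);
        }
        return result;
      }

      public static void main(String[] args) {
        String[] policy = { "accept 4.3.0.0/16:80", "reject *:*" };
        for (String rule : policy) {
          if (ruleApplies(rule, "4.3.2.1", 80)) {
            System.out.println(rule.split(" ")[0]);  /* accept */
            break;
          }
        }
      }
    }
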
diff --git a/src/org/torproject/ernie/status/exonerator/ServerDescriptorServlet.java b/src/org/torproject/ernie/status/exonerator/ServerDescriptorServlet.java
new file mode 100644
index 0000000..f94611e
--- /dev/null
+++ b/src/org/torproject/ernie/status/exonerator/ServerDescriptorServlet.java
@@ -0,0 +1,132 @@
+/* Copyright 2011, 2012 The Tor Project
+ * See LICENSE for licensing information */
+package org.torproject.ernie.status.exonerator;
+
+import java.io.BufferedOutputStream;
+import java.io.IOException;
+import java.sql.Connection;
+import java.sql.ResultSet;
+import java.sql.SQLException;
+import java.sql.Statement;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+import javax.naming.Context;
+import javax.naming.InitialContext;
+import javax.naming.NamingException;
+import javax.servlet.ServletException;
+import javax.servlet.http.HttpServlet;
+import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpServletResponse;
+import javax.sql.DataSource;
+
+public class ServerDescriptorServlet extends HttpServlet {
+
+ private static final long serialVersionUID = -7935883442750583462L;
+
+ private DataSource ds;
+
+ private Logger logger;
+
+ public void init() {
+
+ /* Initialize logger. */
+ this.logger = Logger.getLogger(
+ ServerDescriptorServlet.class.toString());
+
+ /* Look up data source. */
+ try {
+ Context cxt = new InitialContext();
+ this.ds = (DataSource) cxt.lookup("java:comp/env/jdbc/exonerator");
+ this.logger.info("Successfully looked up data source.");
+ } catch (NamingException e) {
+ this.logger.log(Level.WARNING, "Could not look up data source", e);
+ }
+ }
+
+ public void doGet(HttpServletRequest request,
+ HttpServletResponse response) throws IOException,
+ ServletException {
+
+ /* Read desc-id parameter. */
+ String descIdParameter = request.getParameter("desc-id");
+
+ /* See if we were given a desc-id parameter. If so, look up this
+ * descriptor and return it. */
+ List<byte[]> rawDescriptors = new ArrayList<byte[]>();
+ String filename = null;
+ if (descIdParameter != null) {
+ if (descIdParameter.length() < 8 ||
+ descIdParameter.length() > 40) {
+ response.sendError(HttpServletResponse.SC_BAD_REQUEST);
+ return;
+ }
+ String descId = descIdParameter.toLowerCase();
+ Pattern descIdPattern = Pattern.compile("^[0-9a-f]+$");
+ Matcher descIdMatcher = descIdPattern.matcher(descId);
+ if (!descIdMatcher.matches()) {
+ response.sendError(HttpServletResponse.SC_BAD_REQUEST);
+ return;
+ }
+
+ /* Look up descriptor in the database. */
+ try {
+ long requestedConnection = System.currentTimeMillis();
+ Connection conn = ds.getConnection();
+ Statement statement = conn.createStatement();
+ String query = "SELECT descriptor, rawdescriptor FROM descriptor "
+ + "WHERE descriptor LIKE '" + descId + "%'";
+ ResultSet rs = statement.executeQuery(query);
+ if (rs.next()) {
+ filename = rs.getString(1);
+ rawDescriptors.add(rs.getBytes(2));
+ }
+ rs.close();
+ statement.close();
+ conn.close();
+ this.logger.info("Returned a database connection to the pool "
+ + "after " + (System.currentTimeMillis()
+ - requestedConnection) + " millis.");
+ } catch (SQLException e) {
+ response.sendError(HttpServletResponse.SC_INTERNAL_SERVER_ERROR);
+ return;
+ }
+
+ /* Return an error if no desc-id parameter was given. */
+ } else {
+ response.sendError(HttpServletResponse.SC_BAD_REQUEST);
+ return;
+ }
+
+ /* Write response. */
+ if (rawDescriptors.size() == 0) {
+ response.sendError(HttpServletResponse.SC_NOT_FOUND);
+ return;
+ }
+ response.setContentType("text/plain");
+ int responseLength = 0;
+ for (byte[] rawDescriptor : rawDescriptors) {
+ responseLength += rawDescriptor.length;
+ }
+ response.setHeader("Content-Length", String.valueOf(
+ responseLength));
+ response.setHeader("Content-Disposition", "inline; filename=\""
+ + filename + "\"");
+ BufferedOutputStream output = new BufferedOutputStream(
+ response.getOutputStream());
+ for (byte[] rawDescriptor : rawDescriptors) {
+ output.write(rawDescriptor);
+ }
+ output.flush();
+ output.close();
+ }
+}
+
diff --git a/src/org/torproject/ernie/status/relaysearch/RelaySearchServlet.java b/src/org/torproject/ernie/status/relaysearch/RelaySearchServlet.java
new file mode 100644
index 0000000..4e83bb5
--- /dev/null
+++ b/src/org/torproject/ernie/status/relaysearch/RelaySearchServlet.java
@@ -0,0 +1,505 @@
+/* Copyright 2011, 2012 The Tor Project
+ * See LICENSE for licensing information */
+package org.torproject.ernie.status.relaysearch;
+
+import java.io.IOException;
+import java.io.UnsupportedEncodingException;
+import java.math.BigInteger;
+import java.sql.Connection;
+import java.sql.ResultSet;
+import java.sql.SQLException;
+import java.sql.Statement;
+import java.text.ParseException;
+import java.text.SimpleDateFormat;
+import java.util.ArrayList;
+import java.util.Calendar;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.SortedMap;
+import java.util.SortedSet;
+import java.util.TimeZone;
+import java.util.TreeMap;
+import java.util.TreeSet;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+import java.util.regex.Pattern;
+
+import javax.naming.Context;
+import javax.naming.InitialContext;
+import javax.naming.NamingException;
+import javax.servlet.ServletException;
+import javax.servlet.http.HttpServlet;
+import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpServletResponse;
+import javax.sql.DataSource;
+
+import org.apache.commons.codec.binary.Base64;
+
+/**
+ * Web page that allows users to search for relays in the descriptor
+ * archives.
+ *
+ * Possible search terms for testing:
+ * - gabelmoo
+ * - gabelmoo 2010-09
+ * - gabelmoo 2010-09-18
+ * - gabelmoo $F2044413DAC2E02E3D6BCF4735A19BCA1DE97281
+ * - gabelmoo 80.190.246
+ * - gabelmoo $F2044413DAC2E02E3D6BCF4735A19BCA1DE97281 80.190.246
+ * - 5898549205 dc737cc9dca16af6 79.212.74.45
+ * - 5898549205 dc737cc9dca16af6
+ * - 80.190.246.100
+ * - $F2044413DAC2E02E3D6BCF4735A19BCA1DE97281
+ * - $F2044413DAC2E02E3D6BCF4735A19BCA1DE97281 80.190.246
+ * - 58985492
+ * - 58985492 79.212.74.45
+ */
+public class RelaySearchServlet extends HttpServlet {
+
+ private static final long serialVersionUID = -1772662230310611805L;
+
+ private Pattern alphaNumDotDashDollarSpacePattern =
+ Pattern.compile("[A-Za-z0-9\\.\\-$ ]+");
+
+ private Pattern numPattern = Pattern.compile("[0-9]+");
+
+ private Pattern hexPattern = Pattern.compile("[A-Fa-f0-9]+");
+
+ private Pattern alphaNumPattern = Pattern.compile("[A-Za-z0-9]+");
+
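+ /* Note: SimpleDateFormat is not thread-safe; sharing the formatters
+ * below across concurrent requests can garble parsed dates. */
+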
+ private SimpleDateFormat dayFormat = new SimpleDateFormat("yyyy-MM-dd");
+
+ private SimpleDateFormat monthFormat = new SimpleDateFormat("yyyy-MM");
+
+ private SimpleDateFormat dateTimeFormat =
+ new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
+
+ private long minValidAfterMillis;
+
+ private DataSource ds;
+
+ private Logger logger;
+
+ public void init() {
+
+ /* Initialize logger. */
+ this.logger = Logger.getLogger(RelaySearchServlet.class.toString());
+
+ /* Initialize date format parsers. */
+ dayFormat.setTimeZone(TimeZone.getTimeZone("UTC"));
+ monthFormat.setTimeZone(TimeZone.getTimeZone("UTC"));
+ dateTimeFormat.setTimeZone(TimeZone.getTimeZone("UTC"));
+
+ /* Look up data source. */
+ try {
+ Context cxt = new InitialContext();
+ this.ds = (DataSource) cxt.lookup("java:comp/env/jdbc/tordir");
+ this.logger.info("Successfully looked up data source.");
+ } catch (NamingException e) {
+ this.logger.log(Level.WARNING, "Could not look up data source", e);
+ }
+
+ /* Look up first consensus in the database. */
+ try {
+ long requestedConnection = System.currentTimeMillis();
+ Connection conn = this.ds.getConnection();
+ String query = "SELECT MIN(validafter) AS first FROM consensus";
+ Statement statement = conn.createStatement();
+ ResultSet rs = statement.executeQuery(query);
+ if (rs.next()) {
+ this.minValidAfterMillis = rs.getTimestamp(1).getTime();
+ }
+ rs.close();
+ statement.close();
+ conn.close();
+ this.logger.info("Returned a database connection to the pool "
+ + "after " + (System.currentTimeMillis()
+ - requestedConnection) + " millis.");
+ } catch (SQLException e) {
+ this.logger.log(Level.WARNING, "Could not look up first consensus "
+ + "valid-after time in the database.", e);
+ }
+ }
+
+ public void doGet(HttpServletRequest request,
+ HttpServletResponse response) throws IOException,
+ ServletException {
+
+ /* Read search parameter. If we don't have a search parameter, we're
+ * done here. */
+ String searchParameter = request.getParameter("search");
+ if (searchParameter == null || searchParameter.length() == 0) {
+ request.getRequestDispatcher("WEB-INF/relay-search.jsp").forward(
+ request, response);
+ return;
+ }
+
+ /* Parse search parameter to identify what nickname, fingerprint,
+ * and/or IP address to search for. A valid query contains no more
+ * than one identifier for each of the fields. As a special case,
+ * search terms of 8 to 19 hex characters could be either a nickname
+ * or a fingerprint; without a leading $ they are treated as
+ * nicknames. */
+ String searchNickname = "";
+ String searchFingerprint = "";
+ String searchIPAddress = "";
+ SortedSet<String> searchDays = new TreeSet<String>();
+ SortedSet<String> searchMonths = new TreeSet<String>();
+ SortedSet<Long> searchDayTimestamps = new TreeSet<Long>();
+ SortedSet<Long> searchMonthTimestamps = new TreeSet<Long>();
+ boolean validQuery = false;
+
+ /* Only parse the search parameter if it contains nothing other than
+ * alphanumeric characters, dots, dashes, dollar signs, and spaces. */
+ if (alphaNumDotDashDollarSpacePattern.matcher(searchParameter).
+ matches()) {
+ SortedSet<String> searchTerms = new TreeSet<String>();
+ if (searchParameter.trim().contains(" ")) {
+ String[] split = searchParameter.trim().split(" ");
+ for (int i = 0; i < split.length; i++) {
+ if (split[i].length() > 0) {
+ searchTerms.add(split[i]);
+ }
+ }
+ } else {
+ searchTerms.add(searchParameter.trim());
+ }
+
+ /* Parse each search term separately. */
+ for (String searchTerm : searchTerms) {
+
+ /* If the search term contains a dot (and doesn't start with one),
+ * it can only be an IP address. */
+ if (searchTerm.contains(".") && !searchTerm.startsWith(".")) {
+ String[] octets = searchTerm.split("\\.");
+ if (searchIPAddress.length() > 0 || octets.length < 2 ||
+ octets.length > 4) {
+ validQuery = false;
+ break;
+ }
+ boolean invalidOctet = false;
+ StringBuilder sb = new StringBuilder();
+ for (int i = 0; i < octets.length; i++) {
+ if (!numPattern.matcher(octets[i]).matches() ||
+ octets[i].length() > 3 ||
+ Integer.parseInt(octets[i]) > 255) {
+ invalidOctet = true;
+ break;
+ } else {
+ sb.append("." + Integer.parseInt(octets[i]));
+ }
+ }
+ if (invalidOctet) {
+ validQuery = false;
+ break;
+ }
+ if (octets.length < 4) {
+ sb.append(".");
+ }
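+ /* Keep the trailing dot for partial addresses, so that the LIKE
+ * prefix match below cannot match a longer octet; e.g., a search
+ * for 80.19 must not match 80.190.x.x. */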
+ searchIPAddress = sb.toString().substring(1);
+ validQuery = true;
+ }
+
+ /* If the search term starts with "20" and contains hyphens, it must
+ * be a month or a day. */
+ else if (searchTerm.contains("-") &&
+ searchTerm.startsWith("20")) {
+ try {
+ if (searchTerm.length() == 10) {
+ searchDayTimestamps.add(dayFormat.parse(searchTerm).
+ getTime());
+ searchDays.add(searchTerm);
+ } else if (searchTerm.length() == 7) {
+ searchMonthTimestamps.add(monthFormat.parse(searchTerm).
+ getTime());
+ searchMonths.add(searchTerm);
+ } else {
+ validQuery = false;
+ break;
+ }
+ } catch (ParseException e) {
+ validQuery = false;
+ break;
+ }
+ }
+
+ /* If the search term starts with a $ followed by 8 to 40 hex
+ * characters, or consists of 20 to 40 hex characters without the $,
+ * it must be a fingerprint. */
+ else if ((searchTerm.length() >= 9 && searchTerm.length() <= 41 &&
+ searchTerm.startsWith("$") &&
+ hexPattern.matcher(searchTerm.substring(1)).matches()) ||
+ (searchTerm.length() > 19 && searchTerm.length() <= 40 &&
+ !searchTerm.startsWith("$") &&
+ hexPattern.matcher(searchTerm).matches())) {
+ if (searchFingerprint.length() > 0) {
+ validQuery = false;
+ break;
+ }
+ searchFingerprint = searchTerm.substring(
+ (searchTerm.startsWith("$") ? 1 : 0));
+ validQuery = true;
+ }
+
+ /* If the search term consists of up to 19 alphanumeric characters,
+ * it must be a nickname. */
+ else if (searchTerm.length() <= 19 &&
+ alphaNumPattern.matcher(searchTerm).matches()) {
+ if (searchNickname.length() > 0) {
+ validQuery = false;
+ break;
+ }
+ searchNickname = searchTerm;
+ validQuery = true;
+ }
+
+ /* We didn't recognize this search term. */
+ else {
+ validQuery = false;
+ break;
+ }
+ }
+ }
+
+ /* We accept at most one month or at most three days, but not both,
+ * so that queries cannot accidentally keep the database busy. */
+ if (searchDays.size() > 3 || searchMonths.size() > 1 ||
+ (searchMonths.size() == 1 && searchDays.size() > 0)) {
+ validQuery = false;
+ }
+
+ /* If the query is invalid, stop here. */
+ if (!validQuery) {
+ request.setAttribute("invalidQuery", "Query is invalid.");
+ request.getRequestDispatcher("WEB-INF/relay-search.jsp").
+ forward(request, response);
+ return;
+ }
+
+ /* Prepare a string that says what we're searching for. */
+ List<String> recognizedSearchTerms = new ArrayList<String>();
+ if (searchNickname.length() > 0) {
+ recognizedSearchTerms.add("nickname <b>" + searchNickname + "</b>");
+ }
+ if (searchFingerprint.length() > 0) {
+ recognizedSearchTerms.add("fingerprint <b>" + searchFingerprint
+ + "</b>");
+ }
+ if (searchIPAddress.length() > 0) {
+ recognizedSearchTerms.add("IP address <b>" + searchIPAddress
+ + "</b>");
+ }
+ List<String> recognizedIntervals = new ArrayList<String>();
+ for (String searchTerm : searchMonths) {
+ recognizedIntervals.add("in <b>" + searchTerm + "</b>");
+ }
+ for (String searchTerm : searchDays) {
+ recognizedIntervals.add("on <b>" + searchTerm + "</b>");
+ }
+ StringBuilder searchNoticeBuilder = new StringBuilder();
+ searchNoticeBuilder.append("Searching for relays with ");
+ if (recognizedSearchTerms.size() == 1) {
+ searchNoticeBuilder.append(recognizedSearchTerms.get(0));
+ } else if (recognizedSearchTerms.size() == 2) {
+ searchNoticeBuilder.append(recognizedSearchTerms.get(0) + " and "
+ + recognizedSearchTerms.get(1));
+ } else {
+ for (int i = 0; i < recognizedSearchTerms.size() - 1; i++) {
+ searchNoticeBuilder.append(recognizedSearchTerms.get(i) + ", ");
+ }
+ searchNoticeBuilder.append("and " + recognizedSearchTerms.get(
+ recognizedSearchTerms.size() - 1));
+ }
+ if (recognizedIntervals.size() == 1) {
+ searchNoticeBuilder.append(" running "
+ + recognizedIntervals.get(0));
+ } else if (recognizedIntervals.size() == 2) {
+ searchNoticeBuilder.append(" running " + recognizedIntervals.get(0)
+ + " and/or " + recognizedIntervals.get(1));
+ } else if (recognizedIntervals.size() > 2) {
+ searchNoticeBuilder.append(" running ");
+ for (int i = 0; i < recognizedIntervals.size() - 1; i++) {
+ searchNoticeBuilder.append(recognizedIntervals.get(i) + ", ");
+ }
+ searchNoticeBuilder.append("and/or " + recognizedIntervals.get(
+ recognizedIntervals.size() - 1));
+ }
+ searchNoticeBuilder.append(" ...");
+ String searchNotice = searchNoticeBuilder.toString();
+ request.setAttribute("searchNotice", searchNotice);
+
+ /* Prepare the query string. */
+ StringBuilder conditionBuilder = new StringBuilder();
+ boolean addAnd = false;
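+ /* All three values were validated above (alphanumeric, hex, and
+ * digits/dots respectively), so concatenating them into the query
+ * is safe here; prepared statements would still be more robust. */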
+ if (searchNickname.length() > 0) {
+ conditionBuilder.append((addAnd ? "AND " : "")
+ + "LOWER(nickname) LIKE '" + searchNickname.toLowerCase()
+ + "%' ");
+ addAnd = true;
+ }
+ if (searchFingerprint.length() > 0) {
+ conditionBuilder.append((addAnd ? "AND " : "")
+ + "fingerprint LIKE '" + searchFingerprint.toLowerCase()
+ + "%' ");
+ addAnd = true;
+ }
+ if (searchIPAddress.length() > 0) {
+ conditionBuilder.append((addAnd ? "AND " : "")
+ + "address LIKE '" + searchIPAddress + "%' ");
+ addAnd = true;
+ }
+ List<String> timeIntervals = new ArrayList<String>();
+ if (searchDayTimestamps.size() > 0 ||
+ searchMonthTimestamps.size() > 0) {
+ StringBuilder timeIntervalBuilder = new StringBuilder();
+ boolean addOr = false;
+ timeIntervalBuilder.append("AND (");
+ for (long searchTimestamp : searchDayTimestamps) {
+ if (searchTimestamp < this.minValidAfterMillis) {
+ request.setAttribute("outsideInterval", "Returned search "
+ + "results may be incomplete, as our data only dates back "
+ + "to " + dateTimeFormat.format(this.minValidAfterMillis)
+ + ". Older archives are not available.");
+ }
+ timeIntervalBuilder.append((addOr ? "OR " : "")
+ + "(validafter >= '"
+ + dateTimeFormat.format(searchTimestamp) + "' AND "
+ + "validafter < '" + dateTimeFormat.format(searchTimestamp
+ + 24L * 60L * 60L * 1000L) + "') ");
+ addOr = true;
+ }
+ for (long searchTimestamp : searchMonthTimestamps) {
+ if (searchTimestamp < this.minValidAfterMillis) {
+ request.setAttribute("outsideInterval", "Returned search "
+ + "results may be incomplete, as our data only dates back "
+ + "to " + dateTimeFormat.format(this.minValidAfterMillis)
+ + ". Older archives are not available.");
+ }
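+ /* A month interval runs from the first of the month (the parsed
+ * timestamp) up to, but not including, the first of the next
+ * month. */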
+ Calendar firstOfNextMonth = Calendar.getInstance(
+ TimeZone.getTimeZone("UTC"));
+ firstOfNextMonth.setTimeInMillis(searchTimestamp);
+ firstOfNextMonth.add(Calendar.MONTH, 1);
+ timeIntervalBuilder.append((addOr ? "OR " : "")
+ + "(validafter >= '"
+ + dateTimeFormat.format(searchTimestamp) + "' AND "
+ + "validafter < '" + dateTimeFormat.format(
+ firstOfNextMonth.getTimeInMillis()) + "') ");
+ addOr = true;
+ }
+ timeIntervalBuilder.append(") ");
+ timeIntervals.add(timeIntervalBuilder.toString());
+ } else {
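+ /* No date or month was given: first query the past 4 days and only
+ * fall back to the past 30 days if the first query returns fewer
+ * than 31 matches (see the query loop below). */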
+ timeIntervals.add("AND validafter >= '"
+ + dateTimeFormat.format(System.currentTimeMillis()
+ - 4L * 24L * 60L * 60L * 1000L) + "' ");
+ timeIntervals.add("AND validafter >= '"
+ + dateTimeFormat.format(System.currentTimeMillis()
+ - 30L * 24L * 60L * 60L * 1000L) + "' ");
+ }
+ List<String> queries = new ArrayList<String>();
+ for (String timeInterval : timeIntervals) {
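+ /* The inner SELECT picks the valid-after times of the up to 31 most
+ * recent matching status entries; the outer SELECT then returns all
+ * matching entries from exactly those consensuses. */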
+ StringBuilder queryBuilder = new StringBuilder();
+ queryBuilder.append("SELECT validafter, fingerprint, descriptor, "
+ + "rawdesc FROM statusentry WHERE validafter IN (SELECT "
+ + "validafter FROM statusentry WHERE ");
+ queryBuilder.append(conditionBuilder.toString());
+ queryBuilder.append(timeInterval);
+ queryBuilder.append("ORDER BY validafter DESC LIMIT 31) AND ");
+ queryBuilder.append(conditionBuilder.toString());
+ queryBuilder.append(timeInterval);
+ queries.add(queryBuilder.toString());
+ }
+
+ /* Actually execute the query. */
+ long startedQuery = System.currentTimeMillis();
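+ /* Order results by valid-after time in reverse, so that the most
+ * recent consensus appears first. */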
+ SortedMap<String, SortedSet<String>> foundDescriptors =
+ new TreeMap<String, SortedSet<String>>(
+ Collections.reverseOrder());
+ Map<String, String> rawValidAfterLines =
+ new HashMap<String, String>();
+ Map<String, String> rawStatusEntries = new HashMap<String, String>();
+ String query = null;
+ int matches = 0;
+ try {
+ long requestedConnection = System.currentTimeMillis();
+ Connection conn = this.ds.getConnection();
+ while (!queries.isEmpty()) {
+ query = queries.remove(0);
+ this.logger.info("Running query '" + query + "'.");
+ Statement statement = conn.createStatement();
+ ResultSet rs = statement.executeQuery(query);
+ while (rs.next()) {
+ matches++;
+ String validAfter = rs.getTimestamp(1).toString().
+ substring(0, 19);
+ String fingerprint = rs.getString(2);
+ if (!foundDescriptors.containsKey(validAfter)) {
+ foundDescriptors.put(validAfter, new TreeSet<String>());
+ }
+ foundDescriptors.get(validAfter).add(validAfter + " "
+ + fingerprint);
+ if (!rawValidAfterLines.containsKey(validAfter)) {
+ rawValidAfterLines.put(validAfter, "<tt>valid-after "
+ + "<a href=\"consensus?valid-after="
+ + validAfter.replaceAll(":", "-").replaceAll(" ", "-")
+ + "\" target=\"_blank\">" + validAfter + "</a></tt><br>");
+ }
+ byte[] rawStatusEntry = rs.getBytes(4);
+ String statusEntryLines = null;
+ try {
+ statusEntryLines = new String(rawStatusEntry, "US-ASCII");
+ } catch (UnsupportedEncodingException e) {
+ /* This shouldn't happen, because we know that ASCII is
+ * supported. */
+ }
+ StringBuilder rawStatusEntryBuilder = new StringBuilder();
+ String[] lines = statusEntryLines.split("\n");
+ for (String line : lines) {
+ if (line.startsWith("r ")) {
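+ /* parts[3] of an "r" line is the unpadded base64-encoded descriptor
+ * digest; re-encode it as the 40-character hex desc-id that the
+ * serverdesc link expects. */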
+ String[] parts = line.split(" ");
+ String descriptorBase64 = String.format("%040x",
+ new BigInteger(1, Base64.decodeBase64(parts[3]
+ + "==")));
+ rawStatusEntryBuilder.append("<tt>r " + parts[1] + " "
+ + parts[2] + " <a href=\"serverdesc?desc-id="
+ + descriptorBase64 + "\" target=\"_blank\">" + parts[3]
+ + "</a> " + parts[4] + " " + parts[5] + " " + parts[6]
+ + " " + parts[7] + " " + parts[8] + "</tt><br>");
+ } else {
+ rawStatusEntryBuilder.append("<tt>" + line + "</tt><br>");
+ }
+ }
+ rawStatusEntries.put(validAfter + " " + fingerprint,
+ rawStatusEntryBuilder.toString());
+ }
+ rs.close();
+ statement.close();
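+ /* With at least 31 matches we have enough results; skip any
+ * remaining (broader) fallback queries. */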
+ if (matches >= 31) {
+ queries.clear();
+ }
+ }
+ conn.close();
+ this.logger.info("Returned a database connection to the pool "
+ + "after " + (System.currentTimeMillis()
+ - requestedConnection) + " millis.");
+ } catch (SQLException e) {
+
+ /* Tell the user we have a database problem. */
+ response.sendError(HttpServletResponse.SC_INTERNAL_SERVER_ERROR,
+ "Database problem");
+ return;
+ }
+ request.setAttribute("query", query);
+ request.setAttribute("queryTime", System.currentTimeMillis()
+ - startedQuery);
+ request.setAttribute("foundDescriptors", foundDescriptors);
+ request.setAttribute("rawValidAfterLines", rawValidAfterLines);
+ request.setAttribute("rawStatusEntries", rawStatusEntries);
+ request.setAttribute("matches", matches);
+
+ /* We're done. Let the JSP do the rest. */
+ request.getRequestDispatcher("WEB-INF/relay-search.jsp").forward(
+ request, response);
+ }
+}
+
diff --git a/src/org/torproject/ernie/web/ConsensusHealthServlet.java b/src/org/torproject/ernie/web/ConsensusHealthServlet.java
deleted file mode 100644
index 2ca26d5..0000000
--- a/src/org/torproject/ernie/web/ConsensusHealthServlet.java
+++ /dev/null
@@ -1,57 +0,0 @@
-/* Copyright 2011, 2012 The Tor Project
- * See LICENSE for licensing information */
-package org.torproject.ernie.web;
-
-import java.io.BufferedInputStream;
-import java.io.BufferedOutputStream;
-import java.io.File;
-import java.io.FileInputStream;
-import java.io.IOException;
-
-import javax.servlet.ServletException;
-import javax.servlet.http.HttpServlet;
-import javax.servlet.http.HttpServletRequest;
-import javax.servlet.http.HttpServletResponse;
-
-public class ConsensusHealthServlet extends HttpServlet {
-
- private static final long serialVersionUID = -5230032733057814869L;
-
- public void doGet(HttpServletRequest request,
- HttpServletResponse response) throws IOException,
- ServletException {
-
- /* Read file from disk and write it to response. */
- BufferedInputStream input = null;
- BufferedOutputStream output = null;
- try {
- File f = new File("/srv/metrics.torproject.org/ernie/website/"
- + "consensus-health.html");
- if (!f.exists()) {
- response.sendError(HttpServletResponse.SC_NOT_FOUND);
- return;
- }
- response.setContentType(this.getServletContext().getMimeType(f.getName()));
- response.setHeader("Content-Length", String.valueOf(
- f.length()));
- response.setHeader("Content-Disposition",
- "inline; filename=\"" + f.getName() + "\"");
- input = new BufferedInputStream(new FileInputStream(f),
- 1024);
- output = new BufferedOutputStream(response.getOutputStream(), 1024);
- byte[] buffer = new byte[1024];
- int length;
- while ((length = input.read(buffer)) > 0) {
- output.write(buffer, 0, length);
- }
- } finally {
- if (output != null) {
- output.close();
- }
- if (input != null) {
- input.close();
- }
- }
- }
-}
-
diff --git a/src/org/torproject/ernie/web/ConsensusServlet.java b/src/org/torproject/ernie/web/ConsensusServlet.java
deleted file mode 100644
index 5f642fe..0000000
--- a/src/org/torproject/ernie/web/ConsensusServlet.java
+++ /dev/null
@@ -1,124 +0,0 @@
-/* Copyright 2011, 2012 The Tor Project
- * See LICENSE for licensing information */
-package org.torproject.ernie.web;
-
-import java.io.BufferedOutputStream;
-import java.io.IOException;
-import java.sql.Connection;
-import java.sql.ResultSet;
-import java.sql.SQLException;
-import java.sql.Statement;
-import java.text.ParseException;
-import java.text.SimpleDateFormat;
-import java.util.TimeZone;
-import java.util.logging.Level;
-import java.util.logging.Logger;
-
-import javax.naming.Context;
-import javax.naming.InitialContext;
-import javax.naming.NamingException;
-import javax.servlet.ServletException;
-import javax.servlet.http.HttpServlet;
-import javax.servlet.http.HttpServletRequest;
-import javax.servlet.http.HttpServletResponse;
-import javax.sql.DataSource;
-
-public class ConsensusServlet extends HttpServlet {
-
- private static final long serialVersionUID = 3147332016303032164L;
-
- private DataSource ds;
-
- private Logger logger;
-
- public void init() {
-
- /* Initialize logger. */
- this.logger = Logger.getLogger(ConsensusServlet.class.toString());
-
- /* Look up data source. */
- try {
- Context cxt = new InitialContext();
- this.ds = (DataSource) cxt.lookup("java:comp/env/jdbc/exonerator");
- this.logger.info("Successfully looked up data source.");
- } catch (NamingException e) {
- this.logger.log(Level.WARNING, "Could not look up data source", e);
- }
- }
-
- public void doGet(HttpServletRequest request,
- HttpServletResponse response) throws IOException,
- ServletException {
-
- /* Check valid-after parameter. */
- String validAfterParameter = request.getParameter("valid-after");
- if (validAfterParameter == null ||
- validAfterParameter.length() != "yyyy-MM-dd-HH-mm-ss".length()) {
- response.sendError(HttpServletResponse.SC_BAD_REQUEST);
- return;
- }
- SimpleDateFormat parameterFormat = new SimpleDateFormat(
- "yyyy-MM-dd-HH-mm-ss");
- parameterFormat.setTimeZone(TimeZone.getTimeZone("UTC"));
- long parsedTimestamp = -1L;
- try {
- parsedTimestamp = parameterFormat.parse(validAfterParameter).
- getTime();
- } catch (ParseException e) {
- response.sendError(HttpServletResponse.SC_BAD_REQUEST);
- return;
- }
- if (parsedTimestamp < 0L) {
- response.sendError(HttpServletResponse.SC_BAD_REQUEST);
- return;
- }
-
- /* Look up consensus in the database. */
- SimpleDateFormat databaseFormat = new SimpleDateFormat(
- "yyyy-MM-dd HH:mm:ss");
- databaseFormat.setTimeZone(TimeZone.getTimeZone("UTC"));
- String databaseParameter = databaseFormat.format(parsedTimestamp);
- byte[] rawDescriptor = null;
- try {
- long requestedConnection = System.currentTimeMillis();
- Connection conn = this.ds.getConnection();
- Statement statement = conn.createStatement();
- String query = "SELECT rawconsensus FROM consensus "
- + "WHERE validafter = '" + databaseParameter + "'";
- ResultSet rs = statement.executeQuery(query);
- if (rs.next()) {
- rawDescriptor = rs.getBytes(1);
- }
- rs.close();
- statement.close();
- conn.close();
- this.logger.info("Returned a database connection to the pool after "
- + (System.currentTimeMillis() - requestedConnection)
- + " millis.");
- } catch (SQLException e) {
- response.sendError(HttpServletResponse.SC_INTERNAL_SERVER_ERROR);
- return;
- }
-
- /* Write response. */
- if (rawDescriptor == null) {
- response.sendError(HttpServletResponse.SC_NOT_FOUND);
- return;
- }
- try {
- response.setContentType("text/plain");
- response.setHeader("Content-Length", String.valueOf(
- rawDescriptor.length));
- response.setHeader("Content-Disposition", "inline; filename=\""
- + validAfterParameter + "-consensus\"");
- BufferedOutputStream output = new BufferedOutputStream(
- response.getOutputStream());
- output.write(rawDescriptor);
- output.flush();
- output.close();
- } finally {
- /* Nothing to do here. */
- }
- }
-}
-
diff --git a/src/org/torproject/ernie/web/Countries.java b/src/org/torproject/ernie/web/Countries.java
deleted file mode 100644
index 10406a5..0000000
--- a/src/org/torproject/ernie/web/Countries.java
+++ /dev/null
@@ -1,284 +0,0 @@
-/* Copyright 2011, 2012 The Tor Project
- * See LICENSE for licensing information */
-package org.torproject.ernie.web;
-
-import java.util.ArrayList;
-import java.util.List;
-
-public class Countries {
-
- private static Countries instance = new Countries();
-
- public static Countries getInstance() {
- return Countries.instance;
- }
-
- /* List of arrays of length 2, containing country codes at [0] and
- * country names at [1], alphabetically ordered by country names. */
- private List<String[]> knownCountries;
-
- private Countries() {
- this.knownCountries = new ArrayList<String[]>();
- this.knownCountries.add("af;Afghanistan".split(";"));
- this.knownCountries.add("ax;Aland Islands".split(";"));
- this.knownCountries.add("al;Albania".split(";"));
- this.knownCountries.add("dz;Algeria".split(";"));
- this.knownCountries.add("as;American Samoa".split(";"));
- this.knownCountries.add("ad;Andorra".split(";"));
- this.knownCountries.add("ao;Angola".split(";"));
- this.knownCountries.add("ai;Anguilla".split(";"));
- this.knownCountries.add("aq;Antarctica".split(";"));
- this.knownCountries.add("ag;Antigua and Barbuda".split(";"));
- this.knownCountries.add("ar;Argentina".split(";"));
- this.knownCountries.add("am;Armenia".split(";"));
- this.knownCountries.add("aw;Aruba".split(";"));
- this.knownCountries.add("au;Australia".split(";"));
- this.knownCountries.add("at;Austria".split(";"));
- this.knownCountries.add("az;Azerbaijan".split(";"));
- this.knownCountries.add("bs;Bahamas".split(";"));
- this.knownCountries.add("bh;Bahrain".split(";"));
- this.knownCountries.add("bd;Bangladesh".split(";"));
- this.knownCountries.add("bb;Barbados".split(";"));
- this.knownCountries.add("by;Belarus".split(";"));
- this.knownCountries.add("be;Belgium".split(";"));
- this.knownCountries.add("bz;Belize".split(";"));
- this.knownCountries.add("bj;Benin".split(";"));
- this.knownCountries.add("bm;Bermuda".split(";"));
- this.knownCountries.add("bt;Bhutan".split(";"));
- this.knownCountries.add("bo;Bolivia".split(";"));
- this.knownCountries.add("ba;Bosnia and Herzegovina".split(";"));
- this.knownCountries.add("bw;Botswana".split(";"));
- this.knownCountries.add("bv;Bouvet Island".split(";"));
- this.knownCountries.add("br;Brazil".split(";"));
- this.knownCountries.add("io;British Indian Ocean Territory".
- split(";"));
- this.knownCountries.add("bn;Brunei".split(";"));
- this.knownCountries.add("bg;Bulgaria".split(";"));
- this.knownCountries.add("bf;Burkina Faso".split(";"));
- this.knownCountries.add("mm;Burma".split(";"));
- this.knownCountries.add("bi;Burundi".split(";"));
- this.knownCountries.add("kh;Cambodia".split(";"));
- this.knownCountries.add("cm;Cameroon".split(";"));
- this.knownCountries.add("ca;Canada".split(";"));
- this.knownCountries.add("cv;Cape Verde".split(";"));
- this.knownCountries.add("ky;Cayman Islands".split(";"));
- this.knownCountries.add("cf;Central African Republic".split(";"));
- this.knownCountries.add("td;Chad".split(";"));
- this.knownCountries.add("cl;Chile".split(";"));
- this.knownCountries.add("cn;China".split(";"));
- this.knownCountries.add("cx;Christmas Island".split(";"));
- this.knownCountries.add("cc;Cocos (Keeling) Islands".split(";"));
- this.knownCountries.add("co;Colombia".split(";"));
- this.knownCountries.add("km;Comoros".split(";"));
- this.knownCountries.add("cd;Congo, The Democratic Republic of the".
- split(";"));
- this.knownCountries.add("cg;Congo".split(";"));
- this.knownCountries.add("ck;Cook Islands".split(";"));
- this.knownCountries.add("cr;Costa Rica".split(";"));
- this.knownCountries.add("ci:Côte d'Ivoire".split(":"));
- this.knownCountries.add("hr;Croatia".split(";"));
- this.knownCountries.add("cu;Cuba".split(";"));
- this.knownCountries.add("cy;Cyprus".split(";"));
- this.knownCountries.add("cz;Czech Republic".split(";"));
- this.knownCountries.add("dk;Denmark".split(";"));
- this.knownCountries.add("dj;Djibouti".split(";"));
- this.knownCountries.add("dm;Dominica".split(";"));
- this.knownCountries.add("do;Dominican Republic".split(";"));
- this.knownCountries.add("ec;Ecuador".split(";"));
- this.knownCountries.add("eg;Egypt".split(";"));
- this.knownCountries.add("sv;El Salvador".split(";"));
- this.knownCountries.add("gq;Equatorial Guinea".split(";"));
- this.knownCountries.add("er;Eritrea".split(";"));
- this.knownCountries.add("ee;Estonia".split(";"));
- this.knownCountries.add("et;Ethiopia".split(";"));
- this.knownCountries.add("fk;Falkland Islands (Malvinas)".split(";"));
- this.knownCountries.add("fo;Faroe Islands".split(";"));
- this.knownCountries.add("fj;Fiji".split(";"));
- this.knownCountries.add("fi;Finland".split(";"));
- this.knownCountries.add("fx;France, Metropolitan".split(";"));
- this.knownCountries.add("fr;France".split(";"));
- this.knownCountries.add("gf;French Guiana".split(";"));
- this.knownCountries.add("pf;French Polynesia".split(";"));
- this.knownCountries.add("tf;French Southern Territories".split(";"));
- this.knownCountries.add("ga;Gabon".split(";"));
- this.knownCountries.add("gm;Gambia".split(";"));
- this.knownCountries.add("ge;Georgia".split(";"));
- this.knownCountries.add("de;Germany".split(";"));
- this.knownCountries.add("gh;Ghana".split(";"));
- this.knownCountries.add("gi;Gibraltar".split(";"));
- this.knownCountries.add("gr;Greece".split(";"));
- this.knownCountries.add("gl;Greenland".split(";"));
- this.knownCountries.add("gd;Grenada".split(";"));
- this.knownCountries.add("gp;Guadeloupe".split(";"));
- this.knownCountries.add("gu;Guam".split(";"));
- this.knownCountries.add("gt;Guatemala".split(";"));
- this.knownCountries.add("gg;Guernsey".split(";"));
- this.knownCountries.add("gn;Guinea".split(";"));
- this.knownCountries.add("gw;Guinea-Bissau".split(";"));
- this.knownCountries.add("gy;Guyana".split(";"));
- this.knownCountries.add("ht;Haiti".split(";"));
- this.knownCountries.add("hm;Heard Island and McDonald Islands".
- split(";"));
- this.knownCountries.add("va;Vatican City".split(";"));
- this.knownCountries.add("hn;Honduras".split(";"));
- this.knownCountries.add("hk;Hong Kong".split(";"));
- this.knownCountries.add("hu;Hungary".split(";"));
- this.knownCountries.add("is;Iceland".split(";"));
- this.knownCountries.add("in;India".split(";"));
- this.knownCountries.add("id;Indonesia".split(";"));
- this.knownCountries.add("ir;Iran".split(";"));
- this.knownCountries.add("iq;Iraq".split(";"));
- this.knownCountries.add("ie;Ireland".split(";"));
- this.knownCountries.add("im;Isle of Man".split(";"));
- this.knownCountries.add("il;Israel".split(";"));
- this.knownCountries.add("it;Italy".split(";"));
- this.knownCountries.add("jm;Jamaica".split(";"));
- this.knownCountries.add("jp;Japan".split(";"));
- this.knownCountries.add("je;Jersey".split(";"));
- this.knownCountries.add("jo;Jordan".split(";"));
- this.knownCountries.add("kz;Kazakhstan".split(";"));
- this.knownCountries.add("ke;Kenya".split(";"));
- this.knownCountries.add("ki;Kiribati".split(";"));
- this.knownCountries.add("kp;North Korea".split(";"));
- this.knownCountries.add("kr;Korea, Republic of".split(";"));
- this.knownCountries.add("kw;Kuwait".split(";"));
- this.knownCountries.add("kg;Kyrgyzstan".split(";"));
- this.knownCountries.add("la;Laos".split(";"));
- this.knownCountries.add("lv;Latvia".split(";"));
- this.knownCountries.add("lb;Lebanon".split(";"));
- this.knownCountries.add("ls;Lesotho".split(";"));
- this.knownCountries.add("lr;Liberia".split(";"));
- this.knownCountries.add("ly;Libya".split(";"));
- this.knownCountries.add("li;Liechtenstein".split(";"));
- this.knownCountries.add("lt;Lithuania".split(";"));
- this.knownCountries.add("lu;Luxembourg".split(";"));
- this.knownCountries.add("mo;Macau".split(";"));
- this.knownCountries.add("mk;Macedonia".split(";"));
- this.knownCountries.add("mg;Madagascar".split(";"));
- this.knownCountries.add("mw;Malawi".split(";"));
- this.knownCountries.add("my;Malaysia".split(";"));
- this.knownCountries.add("mv;Maldives".split(";"));
- this.knownCountries.add("ml;Mali".split(";"));
- this.knownCountries.add("mt;Malta".split(";"));
- this.knownCountries.add("mh;Marshall Islands".split(";"));
- this.knownCountries.add("mq;Martinique".split(";"));
- this.knownCountries.add("mr;Mauritania".split(";"));
- this.knownCountries.add("mu;Mauritius".split(";"));
- this.knownCountries.add("yt;Mayotte".split(";"));
- this.knownCountries.add("mx;Mexico".split(";"));
- this.knownCountries.add("fm;Micronesia, Federated States of".
- split(";"));
- this.knownCountries.add("md;Moldova, Republic of".split(";"));
- this.knownCountries.add("mc;Monaco".split(";"));
- this.knownCountries.add("mn;Mongolia".split(";"));
- this.knownCountries.add("me;Montenegro".split(";"));
- this.knownCountries.add("ms;Montserrat".split(";"));
- this.knownCountries.add("ma;Morocco".split(";"));
- this.knownCountries.add("mz;Mozambique".split(";"));
- this.knownCountries.add("na;Namibia".split(";"));
- this.knownCountries.add("nr;Nauru".split(";"));
- this.knownCountries.add("np;Nepal".split(";"));
- this.knownCountries.add("an;Netherlands Antilles".split(";"));
- this.knownCountries.add("nl;Netherlands".split(";"));
- this.knownCountries.add("nc;New Caledonia".split(";"));
- this.knownCountries.add("nz;New Zealand".split(";"));
- this.knownCountries.add("ni;Nicaragua".split(";"));
- this.knownCountries.add("ne;Niger".split(";"));
- this.knownCountries.add("ng;Nigeria".split(";"));
- this.knownCountries.add("nu;Niue".split(";"));
- this.knownCountries.add("nf;Norfolk Island".split(";"));
- this.knownCountries.add("mp;Northern Mariana Islands".split(";"));
- this.knownCountries.add("no;Norway".split(";"));
- this.knownCountries.add("om;Oman".split(";"));
- this.knownCountries.add("pk;Pakistan".split(";"));
- this.knownCountries.add("pw;Palau".split(";"));
- this.knownCountries.add("ps;Palestinian Territory".split(";"));
- this.knownCountries.add("pa;Panama".split(";"));
- this.knownCountries.add("pg;Papua New Guinea".split(";"));
- this.knownCountries.add("py;Paraguay".split(";"));
- this.knownCountries.add("pe;Peru".split(";"));
- this.knownCountries.add("ph;Philippines".split(";"));
- this.knownCountries.add("pn;Pitcairn Islands".split(";"));
- this.knownCountries.add("pl;Poland".split(";"));
- this.knownCountries.add("pt;Portugal".split(";"));
- this.knownCountries.add("pr;Puerto Rico".split(";"));
- this.knownCountries.add("qa;Qatar".split(";"));
- this.knownCountries.add("re;Reunion".split(";"));
- this.knownCountries.add("ro;Romania".split(";"));
- this.knownCountries.add("ru;Russia".split(";"));
- this.knownCountries.add("rw;Rwanda".split(";"));
- this.knownCountries.add("bl;Saint Bartelemey".split(";"));
- this.knownCountries.add("sh;Saint Helena".split(";"));
- this.knownCountries.add("kn;Saint Kitts and Nevis".split(";"));
- this.knownCountries.add("lc;Saint Lucia".split(";"));
- this.knownCountries.add("mf;Saint Martin".split(";"));
- this.knownCountries.add("pm;Saint Pierre and Miquelon".split(";"));
- this.knownCountries.add("vc;Saint Vincent and the Grenadines".
- split(";"));
- this.knownCountries.add("ws;Samoa".split(";"));
- this.knownCountries.add("sm;San Marino".split(";"));
- this.knownCountries.add("st:São Tomé and Príncipe".
- split(":"));
- this.knownCountries.add("sa;Saudi Arabia".split(";"));
- this.knownCountries.add("sn;Senegal".split(";"));
- this.knownCountries.add("rs;Serbia".split(";"));
- this.knownCountries.add("sc;Seychelles".split(";"));
- this.knownCountries.add("sl;Sierra Leone".split(";"));
- this.knownCountries.add("sg;Singapore".split(";"));
- this.knownCountries.add("sk;Slovakia".split(";"));
- this.knownCountries.add("si;Slovenia".split(";"));
- this.knownCountries.add("sb;Solomon Islands".split(";"));
- this.knownCountries.add("so;Somalia".split(";"));
- this.knownCountries.add("za;South Africa".split(";"));
- this.knownCountries.add(("gs;South Georgia and the South Sandwich "
- + "Islands").split(";"));
- this.knownCountries.add("es;Spain".split(";"));
- this.knownCountries.add("lk;Sri Lanka".split(";"));
- this.knownCountries.add("sd;Sudan".split(";"));
- this.knownCountries.add("sr;Suriname".split(";"));
- this.knownCountries.add("sj;Svalbard and Jan Mayen".split(";"));
- this.knownCountries.add("sz;Swaziland".split(";"));
- this.knownCountries.add("se;Sweden".split(";"));
- this.knownCountries.add("ch;Switzerland".split(";"));
- this.knownCountries.add("sy;Syrian Arab Republic".split(";"));
- this.knownCountries.add("tw;Taiwan".split(";"));
- this.knownCountries.add("tj;Tajikistan".split(";"));
- this.knownCountries.add("tz;Tanzania, United Republic of".split(";"));
- this.knownCountries.add("th;Thailand".split(";"));
- this.knownCountries.add("tl;East Timor".split(";"));
- this.knownCountries.add("tg;Togo".split(";"));
- this.knownCountries.add("tk;Tokelau".split(";"));
- this.knownCountries.add("to;Tonga".split(";"));
- this.knownCountries.add("tt;Trinidad and Tobago".split(";"));
- this.knownCountries.add("tn;Tunisia".split(";"));
- this.knownCountries.add("tr;Turkey".split(";"));
- this.knownCountries.add("tm;Turkmenistan".split(";"));
- this.knownCountries.add("tc;Turks and Caicos Islands".split(";"));
- this.knownCountries.add("tv;Tuvalu".split(";"));
- this.knownCountries.add("ug;Uganda".split(";"));
- this.knownCountries.add("ua;Ukraine".split(";"));
- this.knownCountries.add("ae;United Arab Emirates".split(";"));
- this.knownCountries.add("gb;United Kingdom".split(";"));
- this.knownCountries.add("um;United States Minor Outlying Islands".
- split(";"));
- this.knownCountries.add("us;United States".split(";"));
- this.knownCountries.add("uy;Uruguay".split(";"));
- this.knownCountries.add("uz;Uzbekistan".split(";"));
- this.knownCountries.add("vu;Vanuatu".split(";"));
- this.knownCountries.add("ve;Venezuela".split(";"));
- this.knownCountries.add("vn;Vietnam".split(";"));
- this.knownCountries.add("vg;Virgin Islands, British".split(";"));
- this.knownCountries.add("vi;Virgin Islands, U.S.".split(";"));
- this.knownCountries.add("wf;Wallis and Futuna".split(";"));
- this.knownCountries.add("eh;Western Sahara".split(";"));
- this.knownCountries.add("ye;Yemen".split(";"));
- this.knownCountries.add("zm;Zambia".split(";"));
- this.knownCountries.add("zw;Zimbabwe".split(";"));
- }
-
- public List<String[]> getCountryList() {
- return this.knownCountries;
- }
-}
-
diff --git a/src/org/torproject/ernie/web/CsvServlet.java b/src/org/torproject/ernie/web/CsvServlet.java
deleted file mode 100644
index 2467d55..0000000
--- a/src/org/torproject/ernie/web/CsvServlet.java
+++ /dev/null
@@ -1,97 +0,0 @@
-/* Copyright 2011, 2012 The Tor Project
- * See LICENSE for licensing information */
-package org.torproject.ernie.web;
-
-import java.io.IOException;
-import java.util.SortedSet;
-import java.util.logging.Logger;
-
-import javax.servlet.ServletException;
-import javax.servlet.http.HttpServlet;
-import javax.servlet.http.HttpServletRequest;
-import javax.servlet.http.HttpServletResponse;
-
-/**
- * Servlet that reads an HTTP request for a comma-separated value file,
- * asks the RObjectGenerator to generate this file, and returns it to
- * the client.
- */
-public class CsvServlet extends HttpServlet {
-
- private static final long serialVersionUID = 7501442926823719958L;
-
- private RObjectGenerator rObjectGenerator;
-
- /* Available CSV files. */
- private SortedSet<String> availableCsvFiles;
-
- private Logger logger;
-
- public void init() {
-
- /* Initialize logger. */
- this.logger = Logger.getLogger(CsvServlet.class.toString());
-
- /* Get a reference to the R object generator that we need to generate
- * CSV files. */
- this.rObjectGenerator = (RObjectGenerator) getServletContext().
- getAttribute("RObjectGenerator");
- this.availableCsvFiles = rObjectGenerator.getAvailableCsvFiles();
- }
-
- public void doGet(HttpServletRequest request,
- HttpServletResponse response) throws IOException,
- ServletException {
-
- /* Check if the directory listing was requested. */
- String requestURI = request.getRequestURI();
- if (requestURI.equals("/ernie/csv/")) {
- request.setAttribute("directory", "/csv");
- request.setAttribute("extension", ".csv");
- request.setAttribute("files", this.availableCsvFiles);
- request.getRequestDispatcher("/WEB-INF/dir.jsp").forward(request,
- response);
- return;
- }
-
- /* Find out which CSV file was requested and make sure we know this
- * CSV file type. */
- String requestedCsvFile = requestURI;
- if (requestedCsvFile.endsWith(".csv")) {
- requestedCsvFile = requestedCsvFile.substring(0,
- requestedCsvFile.length() - ".csv".length());
- }
- if (requestedCsvFile.contains("/")) {
- requestedCsvFile = requestedCsvFile.substring(requestedCsvFile.
- lastIndexOf("/") + 1);
- }
- if (!availableCsvFiles.contains(requestedCsvFile)) {
- logger.info("Did not recognize requested .csv file from request "
- + "URI: '" + requestURI + "'. Responding with 404 Not Found.");
- response.sendError(HttpServletResponse.SC_NOT_FOUND);
- return;
- }
- logger.fine("CSV file '" + requestedCsvFile + ".csv' requested.");
-
- /* Request CSV file from R object generator, which asks Rserve to
- * generate it. */
- RObject csvFile = this.rObjectGenerator.generateCsv(
- requestedCsvFile, true);
-
- /* Make sure that we have a .csv file to return. */
- if (csvFile == null) {
- response.sendError(HttpServletResponse.SC_INTERNAL_SERVER_ERROR);
- return;
- }
-
- /* Write CSV file to response. */
- String csvFileContent = new String(csvFile.getBytes());
- response.setContentType("text/csv");
- response.setHeader("Content-Length", String.valueOf(
- csvFileContent.length()));
- response.setHeader("Content-Disposition",
- "inline; filename=\"" + requestedCsvFile + ".csv\"");
- response.getWriter().print(csvFileContent);
- }
-}
-
diff --git a/src/org/torproject/ernie/web/ExoneraTorServlet.java b/src/org/torproject/ernie/web/ExoneraTorServlet.java
deleted file mode 100644
index 3f9746e..0000000
--- a/src/org/torproject/ernie/web/ExoneraTorServlet.java
+++ /dev/null
@@ -1,1154 +0,0 @@
-/* Copyright 2011, 2012 The Tor Project
- * See LICENSE for licensing information */
-package org.torproject.ernie.web;
-
-import java.io.BufferedReader;
-import java.io.IOException;
-import java.io.PrintWriter;
-import java.io.StringReader;
-import java.sql.CallableStatement;
-import java.sql.Connection;
-import java.sql.ResultSet;
-import java.sql.SQLException;
-import java.sql.Statement;
-import java.text.ParseException;
-import java.text.SimpleDateFormat;
-import java.util.ArrayList;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Set;
-import java.util.SortedMap;
-import java.util.SortedSet;
-import java.util.TimeZone;
-import java.util.TreeMap;
-import java.util.TreeSet;
-import java.util.logging.Level;
-import java.util.logging.Logger;
-import java.util.regex.Matcher;
-import java.util.regex.Pattern;
-
-import javax.naming.Context;
-import javax.naming.InitialContext;
-import javax.naming.NamingException;
-import javax.servlet.ServletException;
-import javax.servlet.http.HttpServlet;
-import javax.servlet.http.HttpServletRequest;
-import javax.servlet.http.HttpServletResponse;
-import javax.sql.DataSource;
-
-import org.apache.commons.codec.binary.Hex;
-import org.apache.commons.lang.StringEscapeUtils;
-
-public class ExoneraTorServlet extends HttpServlet {
-
- private static final long serialVersionUID = 1370088989739567509L;
-
- private DataSource ds;
-
- private Logger logger;
-
- public void init() {
-
- /* Initialize logger. */
- this.logger = Logger.getLogger(ExoneraTorServlet.class.toString());
-
- /* Look up data source. */
- try {
- Context cxt = new InitialContext();
- this.ds = (DataSource) cxt.lookup("java:comp/env/jdbc/exonerator");
- this.logger.info("Successfully looked up data source.");
- } catch (NamingException e) {
- this.logger.log(Level.WARNING, "Could not look up data source", e);
- }
- }
-
- private void writeHeader(PrintWriter out) throws IOException {
- out.println("<!DOCTYPE HTML PUBLIC \"-//W3C//DTD HTML 4.0 "
- + "Transitional//EN\">\n"
- + "<html>\n"
- + " <head>\n"
- + " <title>Tor Metrics Portal: ExoneraTor</title>\n"
- + " <meta http-equiv=\"content-type\" content=\"text/html; "
- + "charset=ISO-8859-1\">\n"
- + " <link href=\"/css/stylesheet-ltr.css\" type=\"text/css\" "
- + "rel=\"stylesheet\">\n"
- + " <link href=\"/images/favicon.ico\" "
- + "type=\"image/x-icon\" rel=\"shortcut icon\">\n"
- + " </head>\n"
- + " <body>\n"
- + " <div class=\"center\">\n"
- + " <table class=\"banner\" border=\"0\" cellpadding=\"0\" "
- + "cellspacing=\"0\" summary=\"\">\n"
- + " <tr>\n"
- + " <td class=\"banner-left\"><a "
- + "href=\"/index.html\"><img src=\"/images/top-left.png\" "
- + "alt=\"Click to go to home page\" width=\"193\" "
- + "height=\"79\"></a></td>\n"
- + " <td class=\"banner-middle\">\n"
- + " <a href=\"/\">Home</a>\n"
- + " <a href=\"graphs.html\">Graphs</a>\n"
- + " <a href=\"research.html\">Research</a>\n"
- + " <a href=\"status.html\">Status</a>\n"
- + " <br>\n"
- + " <font size=\"2\">\n"
- + " <a class=\"current\">ExoneraTor</a>\n"
- + " <a href=\"relay-search.html\">Relay Search</a>\n"
- + " <a href=\"consensus-health.html\">Consensus "
- + "Health</a>\n"
- + " </font>\n"
- + " </td>\n"
- + " <td class=\"banner-right\"></td>\n"
- + " </tr>\n"
- + " </table>\n"
- + " <div class=\"main-column\" style=\"margin:5; "
- + "Padding:0;\">\n"
- + " <h2>ExoneraTor</h2>\n"
- + " <h3>or: a website that tells you whether a given IP "
- + "address was a Tor relay</h3>\n"
- + " <br>\n"
- + " <p>Just because you see an Internet connection from a "
- + "particular IP address does not mean you know <i>who</i> "
- + "originated the traffic. Tor anonymizes Internet traffic by "
- + "\"<a href=\"https://www.torproject.org/about/overview"
- + "#thesolution\">onion routing</a>,\" sending packets "
- + "through a series of encrypted hops before they reach their "
- + "destination. Therefore, if you see traffic from a Tor node, "
- + "you may be seeing traffic that originated from someone "
- + "using Tor, rather than from the node operator. The "
- + "Tor Project and Tor node operators have no records of the "
- + "traffic that passes over the network, but we do maintain "
- + "current and historical records of which IP addresses are "
- + "part of the Tor network.</p>\n"
- + " <br>\n"
- + " <p>ExoneraTor tells you whether there was a Tor relay "
- + "running on a given IP address at a given time. ExoneraTor "
- + "can further indicate whether this relay permitted exiting "
- + "to a given server and/or TCP port. ExoneraTor learns these "
- + "facts by parsing the public relay lists and relay "
- + "descriptors that are collected from the Tor directory "
- + "authorities and the exit lists collected by TorDNSEL. By "
- + "inputting an IP address and time, you can determine whether "
- + "that IP was then a part of the Tor network.</p>\n"
- + " <br>\n"
- + " <p><font color=\"red\"><b>Notice:</b> Note that the "
- + "information you are providing below may be visible to "
- + "anyone who can read the network traffic between you and "
- + "this web server or who has access to this web "
- + "server.</font></p>\n"
- + " <br>\n");
- }
-
- private void writeFooter(PrintWriter out) throws IOException {
- out.println(" <br>\n"
- + " </div>\n"
- + " </div>\n"
- + " <div class=\"bottom\" id=\"bottom\">\n"
- + " <p>This material is supported in part by the National "
- + "Science Foundation under Grant No. CNS-0959138. Any "
- + "opinions, findings, and conclusions or recommendations "
- + "expressed in this material are those of the author(s) and "
- + "do not necessarily reflect the views of the National "
- + "Science Foundation.</p>\n"
- + " <p>\"Tor\" and the \"Onion Logo\" are <a "
- + "href=\"https://www.torproject.org/docs/trademark-faq.html.en"
- + "\">registered trademarks</a> of The Tor Project, Inc.</p>\n"
- + " <p>Data on this site is freely available under a <a "
- + "href=\"http://creativecommons.org/publicdomain/zero/1.0/\">"
- + "CC0 no copyright declaration</a>: To the extent possible "
- + "under law, the Tor Project has waived all copyright and "
- + "related or neighboring rights in the data. Graphs are "
- + "licensed under a <a "
- + "href=\"http://creativecommons.org/licenses/by/3.0/us/\">"
- + "Creative Commons Attribution 3.0 United States "
- + "License</a>.</p>\n"
- + " </div>\n"
- + " </body>\n"
- + "</html>");
- out.close();
- }
-
- public void doGet(HttpServletRequest request,
- HttpServletResponse response) throws IOException,
- ServletException {
-
- /* Start writing response. */
- PrintWriter out = response.getWriter();
- writeHeader(out);
-
- /* Open a database connection that we'll use to handle the whole
- * request. */
- Connection conn = null;
- long requestedConnection = System.currentTimeMillis();
- try {
- conn = this.ds.getConnection();
- } catch (SQLException e) {
- out.println("<p><font color=\"red\"><b>Warning: </b></font>Unable "
- + "to connect to the database. If this problem persists, "
- + "please <a href=\"mailto:tor-assistants@xxxxxxxxxxxxxx\">let "
- + "us know</a>!</p>\n");
- writeFooter(out);
- return;
- }
-
- /* Look up first and last consensus in the database. */
- long firstValidAfter = -1L, lastValidAfter = -1L;
- try {
- Statement statement = conn.createStatement();
- String query = "SELECT MIN(validafter) AS first, "
- + "MAX(validafter) AS last FROM consensus";
- ResultSet rs = statement.executeQuery(query);
- if (rs.next()) {
- firstValidAfter = rs.getTimestamp(1).getTime();
- lastValidAfter = rs.getTimestamp(2).getTime();
- }
- rs.close();
- statement.close();
- } catch (SQLException e) {
- /* Looks like we don't have any consensuses. */
- }
- if (firstValidAfter < 0L || lastValidAfter < 0L) {
- out.println("<p><font color=\"red\"><b>Warning: </b></font>This "
- + "server doesn't have any relay lists available. If this "
- + "problem persists, please "
- + "<a href=\"mailto:tor-assistants@xxxxxxxxxxxxxx\">let us "
- + "know</a>!</p>\n");
- writeFooter(out);
- try {
- conn.close();
- this.logger.info("Returned a database connection to the pool "
- + "after " + (System.currentTimeMillis()
- - requestedConnection) + " millis.");
- } catch (SQLException e) {
- }
- return;
- }
-
- out.println("<a name=\"relay\"></a><h3>Was there a Tor relay running "
- + "on this IP address?</h3>");
-
- /* Parse IP parameter. */
- Pattern ipv4AddressPattern = Pattern.compile(
- "^([01]?\\d\\d?|2[0-4]\\d|25[0-5])\\." +
- "([01]?\\d\\d?|2[0-4]\\d|25[0-5])\\." +
- "([01]?\\d\\d?|2[0-4]\\d|25[0-5])\\." +
- "([01]?\\d\\d?|2[0-4]\\d|25[0-5])$");
- Pattern ipv6AddressPattern = Pattern.compile(
- "^\\[?[0-9a-fA-F:]{3,39}\\]?$");
- String ipParameter = request.getParameter("ip");
- String relayIP = "", ipWarning = "";
- if (ipParameter != null && ipParameter.length() > 0) {
- if (ipv4AddressPattern.matcher(ipParameter).matches()) {
- String[] ipParts = ipParameter.split("\\.");
- relayIP = Integer.parseInt(ipParts[0]) + "."
- + Integer.parseInt(ipParts[1]) + "."
- + Integer.parseInt(ipParts[2]) + "."
- + Integer.parseInt(ipParts[3]);
- } else if (ipv6AddressPattern.matcher(ipParameter).matches()) {
- if (ipParameter.startsWith("[") && ipParameter.endsWith("]")) {
- ipParameter = ipParameter.substring(1,
- ipParameter.length() - 1);
- }
- StringBuilder addressHex = new StringBuilder();
- int start = ipParameter.startsWith("::") ? 1 : 0;
- int end = ipParameter.length()
- - (ipParameter.endsWith("::") ? 1 : 0);
- String[] parts = ipParameter.substring(start, end).split(":", -1);
- for (int i = 0; i < parts.length; i++) {
- String part = parts[i];
- if (part.length() == 0) {
- addressHex.append("x");
- } else if (part.length() <= 4) {
- addressHex.append(String.format("%4s", part));
- } else {
- addressHex = null;
- break;
- }
- }
- if (addressHex != null) {
- String addressHexString = addressHex.toString();
- addressHexString = addressHexString.replaceFirst("x",
- String.format("%" + (33 - addressHexString.length()) + "s",
- "0"));
- if (!addressHexString.contains("x") &&
- addressHexString.length() == 32) {
- relayIP = ipParameter.toLowerCase();
- }
- }
- if (relayIP.length() < 1) {
- ipWarning = "\"" + (ipParameter.length() > 40 ?
- StringEscapeUtils.escapeHtml(ipParameter.substring(0, 40))
- + "[...]" : StringEscapeUtils.escapeHtml(ipParameter))
- + "\" is not a valid IP address.";
- }
- } else {
- ipWarning = "\"" + (ipParameter.length() > 20 ?
- StringEscapeUtils.escapeHtml(ipParameter.substring(0, 20))
- + "[...]" : StringEscapeUtils.escapeHtml(ipParameter))
- + "\" is not a valid IP address.";
- }
- }
-
- /* Parse timestamp parameter. */
- String timestampParameter = request.getParameter("timestamp");
- long timestamp = 0L;
- boolean timestampIsDate = false;
- String timestampStr = "", timestampWarning = "";
- SimpleDateFormat shortDateTimeFormat = new SimpleDateFormat(
- "yyyy-MM-dd HH:mm");
- shortDateTimeFormat.setTimeZone(TimeZone.getTimeZone("UTC"));
- SimpleDateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd");
- dateFormat.setTimeZone(TimeZone.getTimeZone("UTC"));
- if (timestampParameter != null && timestampParameter.length() > 0) {
- try {
- if (timestampParameter.split(" ").length == 1) {
- timestamp = dateFormat.parse(timestampParameter).getTime();
- timestampStr = dateFormat.format(timestamp);
- timestampIsDate = true;
- } else {
- timestamp = shortDateTimeFormat.parse(timestampParameter).
- getTime();
- timestampStr = shortDateTimeFormat.format(timestamp);
- }
- if (timestamp < firstValidAfter || timestamp > lastValidAfter) {
- timestampWarning = "Please pick a date or timestamp between \""
- + shortDateTimeFormat.format(firstValidAfter) + "\" and \""
- + shortDateTimeFormat.format(lastValidAfter) + "\".";
- timestamp = 0L;
- }
- } catch (ParseException e) {
- /* We have no way to handle this exception, other than leaving
- * timestampStr at "". */
- timestampWarning = "\"" + (timestampParameter.length() > 20 ?
- StringEscapeUtils.escapeHtml(timestampParameter.
- substring(0, 20)) + "[...]" :
- StringEscapeUtils.escapeHtml(timestampParameter))
- + "\" is not a valid date or timestamp.";
- }
- }
-
- /* If either IP address or timestamp is provided, the other one must
- * be provided, too. */
- if (relayIP.length() < 1 && timestampStr.length() > 0 &&
- ipWarning.length() < 1) {
- ipWarning = "Please provide an IP address.";
- }
- if (relayIP.length() > 0 && timestamp < 1 &&
- timestampWarning.length() < 1) {
- timestampWarning = "Please provide a date or timestamp.";
- }
-
- /* Parse target IP parameter. */
- String targetIP = "", targetPort = "", target = "";
- String[] targetIPParts = null;
- String targetAddrParameter = request.getParameter("targetaddr");
- String targetAddrWarning = "";
- if (targetAddrParameter != null && targetAddrParameter.length() > 0) {
- Matcher targetAddrParameterMatcher =
- ipv4AddressPattern.matcher(targetAddrParameter);
- if (targetAddrParameterMatcher.matches()) {
- String[] targetAddrParts = targetAddrParameter.split("\\.");
- targetIP = Integer.parseInt(targetAddrParts[0]) + "."
- + Integer.parseInt(targetAddrParts[1]) + "."
- + Integer.parseInt(targetAddrParts[2]) + "."
- + Integer.parseInt(targetAddrParts[3]);
- target = targetIP;
- targetIPParts = targetIP.split("\\.");
- } else {
- targetAddrWarning = "\"" + (targetAddrParameter.length() > 20 ?
- StringEscapeUtils.escapeHtml(targetAddrParameter.substring(
- 0, 20)) + "[...]" : StringEscapeUtils.escapeHtml(
- targetAddrParameter)) + "\" is not a valid IP address.";
- }
- }
-
- /* Parse target port parameter. */
- String targetPortParameter = request.getParameter("targetport");
- String targetPortWarning = "";
- if (targetPortParameter != null && targetPortParameter.length() > 0) {
- Pattern targetPortPattern = Pattern.compile("\\d+");
- if (targetPortParameter.length() < 5 &&
- targetPortPattern.matcher(targetPortParameter).matches() &&
- !targetPortParameter.equals("0") &&
- Integer.parseInt(targetPortParameter) < 65536) {
- targetPort = targetPortParameter;
- if (target.length() > 0) {
- target += ":" + targetPort;
- } else {
- target = targetPort;
- }
- } else {
- targetPortWarning = "\"" + (targetPortParameter.length() > 8 ?
- StringEscapeUtils.escapeHtml(targetPortParameter.
- substring(0, 8)) + "[...]" :
- StringEscapeUtils.escapeHtml(targetPortParameter))
- + "\" is not a valid TCP port.";
- }
- }
-
- /* If target port is provided, a target address must be provided,
- * too. */
- /* TODO Relax this requirement. */
- if (targetPort.length() > 0 && targetIP.length() < 1 &&
- targetAddrWarning.length() < 1) {
- targetAddrWarning = "Please provide an IP address.";
- }
-
- /* Write form with IP address and timestamp. */
- out.println(" <form action=\"#relay\">\n"
- + " <input type=\"hidden\" name=\"targetaddr\" "
- + (targetIP.length() > 0 ? " value=\"" + targetIP + "\"" : "")
- + ">\n"
- + " <input type=\"hidden\" name=\"targetPort\""
- + (targetPort.length() > 0 ? " value=\"" + targetPort + "\"" : "")
- + ">\n"
- + " <table>\n"
- + " <tr>\n"
- + " <td align=\"right\">IP address in question:"
- + "</td>\n"
- + " <td><input type=\"text\" name=\"ip\" size=\"30\""
- + (relayIP.length() > 0 ? " value=\"" + relayIP + "\""
- : "")
- + ">"
- + (ipWarning.length() > 0 ? "<br><font color=\"red\">"
- + ipWarning + "</font>" : "")
- + "</td>\n"
- + " <td><i>(Ex.: 86.59.21.38 or "
- + "2001:858:2:2:aabb:0:563b:1526)</i></td>\n"
- + " </tr>\n"
- + " <tr>\n"
- + " <td align=\"right\">Date or timestamp, in "
- + "UTC:</td>\n"
- + " <td><input type=\"text\" name=\"timestamp\""
- + " size=\"30\""
- + (timestampStr.length() > 0 ? " value=\"" + timestampStr + "\""
- : "")
- + ">"
- + (timestampWarning.length() > 0 ? "<br><font color=\"red\">"
- + timestampWarning + "</font>" : "")
- + "</td>\n"
- + " <td><i>(Ex.: 2010-01-01 or 2010-01-01 12:00)"
- + "</i></td>\n"
- + " </tr>\n"
- + " <tr>\n"
- + " <td></td>\n"
- + " <td>\n"
- + " <input type=\"submit\">\n"
- + " <input type=\"reset\">\n"
- + " </td>\n"
- + " <td></td>\n"
- + " </tr>\n"
- + " </table>\n"
- + " </form>\n");
-
- if (relayIP.length() < 1 || timestamp < 1) {
- writeFooter(out);
- try {
- conn.close();
- this.logger.info("Returned a database connection to the pool "
- + "after " + (System.currentTimeMillis()
- - requestedConnection) + " millis.");
- } catch (SQLException e) {
- }
- return;
- }
-
- out.printf("<p>Looking up IP address %s in the relay lists "
- + "published ", relayIP);
- long timestampFrom, timestampTo;
- if (timestampIsDate) {
- /* If we only have a date, consider all consensuses published on the
- * given date until 23:59:59, plus the ones published in the 3 hours
- * before that date began. */
- timestampFrom = timestamp - 3L * 60L * 60L * 1000L;
- timestampTo = timestamp + (24L * 60L * 60L - 1L) * 1000L;
- out.printf("on %s", timestampStr);
- } else {
- /* If we have an exact timestamp, consider the consensuses published
- * in the 3 hours preceding the UTC timestamp. */
- timestampFrom = timestamp - 3L * 60L * 60L * 1000L;
- timestampTo = timestamp;
- out.printf("between %s and %s UTC",
- shortDateTimeFormat.format(timestampFrom),
- shortDateTimeFormat.format(timestampTo));
- }
- /* If we don't find any relays in the given time interval, also look
- * at consensuses published 12 hours before and 12 hours after the
- * interval, in case the user got the "UTC" bit wrong. */
- long timestampTooOld = timestampFrom - 12L * 60L * 60L * 1000L;
- long timestampTooNew = timestampTo + 12L * 60L * 60L * 1000L;
- out.print(" as well as in the relevant exit lists. Clients could "
- + "have selected any of these relays to build circuits. "
- + "You may follow the links to relay lists and relay descriptors "
- + "to grep for the lines printed below and confirm that results "
- + "are correct.<br>");
- SimpleDateFormat validAfterTimeFormat = new SimpleDateFormat(
- "yyyy-MM-dd HH:mm:ss");
- validAfterTimeFormat.setTimeZone(TimeZone.getTimeZone("UTC"));
- String fromValidAfter = validAfterTimeFormat.format(timestampTooOld);
- String toValidAfter = validAfterTimeFormat.format(timestampTooNew);
- SortedSet<Long> tooOldConsensuses = new TreeSet<Long>();
- SortedSet<Long> relevantConsensuses = new TreeSet<Long>();
- SortedSet<Long> tooNewConsensuses = new TreeSet<Long>();
- try {
- Statement statement = conn.createStatement();
- String query = "SELECT validafter FROM consensus "
- + "WHERE validafter >= '" + fromValidAfter
- + "' AND validafter <= '" + toValidAfter + "'";
- ResultSet rs = statement.executeQuery(query);
- while (rs.next()) {
- long consensusTime = rs.getTimestamp(1).getTime();
- if (consensusTime < timestampFrom) {
- tooOldConsensuses.add(consensusTime);
- } else if (consensusTime > timestampTo) {
- tooNewConsensuses.add(consensusTime);
- } else {
- relevantConsensuses.add(consensusTime);
- }
- }
- rs.close();
- statement.close();
- } catch (SQLException e) {
- /* Looks like we don't have any consensuses in the requested
- * interval. */
- }
- SortedSet<Long> allConsensuses = new TreeSet<Long>();
- allConsensuses.addAll(tooOldConsensuses);
- allConsensuses.addAll(relevantConsensuses);
- allConsensuses.addAll(tooNewConsensuses);
- if (allConsensuses.isEmpty()) {
- out.println(" <p>No relay lists found!</p>\n"
- + " <p>Result is INDECISIVE!</p>\n"
- + " <p>We cannot make any statement whether there was "
- + "a Tor relay running on IP address " + relayIP
- + (timestampIsDate ? " on " : " at ") + timestampStr + "! We "
- + "did not find any relevant relay lists at the given time. If "
- + "you think this is an error on our side, please "
- + "<a href=\"mailto:tor-assistants@xxxxxxxxxxxxxx\">contact "
- + "us</a>!</p>\n");
- writeFooter(out);
- try {
- conn.close();
- this.logger.info("Returned a database connection to the pool "
- + "after " + (System.currentTimeMillis()
- - requestedConnection) + " millis.");
- } catch (SQLException e) {
- }
- return;
- }
-
- /* Search for status entries with the given IP address as onion
- * routing address, plus status entries of relays having an exit list
- * entry with the given IP address as exit address. */
- SortedMap<Long, SortedMap<String, String>> statusEntries =
- new TreeMap<Long, SortedMap<String, String>>();
- SortedSet<Long> positiveConsensusesNoTarget = new TreeSet<Long>();
- SortedMap<String, Set<Long>> relevantDescriptors =
- new TreeMap<String, Set<Long>>();
- try {
- CallableStatement cs = conn.prepareCall(
- "{call search_statusentries_by_address_date(?, ?)}");
- cs.setString(1, relayIP);
- cs.setDate(2, new java.sql.Date(timestamp));
- ResultSet rs = cs.executeQuery();
- while (rs.next()) {
- byte[] rawstatusentry = rs.getBytes(1);
- String descriptor = rs.getString(2);
- long validafter = rs.getTimestamp(3).getTime();
- positiveConsensusesNoTarget.add(validafter);
- if (!relevantDescriptors.containsKey(descriptor)) {
- relevantDescriptors.put(descriptor, new HashSet<Long>());
- }
- relevantDescriptors.get(descriptor).add(validafter);
- String fingerprint = rs.getString(4);
- String exitaddress = rs.getString(6);
- StringBuilder html = new StringBuilder();
- for (String line : new String(rawstatusentry).split("\n")) {
- if (line.startsWith("r ")) {
- String[] parts = line.split(" ");
- boolean orAddressMatches = parts[6].equals(relayIP);
- html.append("r " + parts[1] + " " + parts[2] + " "
- + "<a href=\"serverdesc?desc-id=" + descriptor + "\" "
- + "target=\"_blank\">" + parts[3] + "</a> " + parts[4]
- + " " + parts[5] + " " + (orAddressMatches ? "<b>" : "")
- + parts[6] + (orAddressMatches ? "</b>" : "") + " "
- + parts[7] + " " + parts[8] + "\n");
- } else if (line.startsWith("a ") &&
- line.toLowerCase().contains(relayIP)) {
- String address = line.substring("a ".length(),
- line.lastIndexOf(":"));
- String port = line.substring(line.lastIndexOf(":"));
- html.append("a <b>" + address + "</b>" + port + "\n");
- }
- }
- if (exitaddress != null && exitaddress.length() > 0) {
- long scanned = rs.getTimestamp(7).getTime();
- html.append(" [ExitAddress <b>" + exitaddress
- + "</b> " + validAfterTimeFormat.format(scanned) + "]\n");
- }
- if (!statusEntries.containsKey(validafter)) {
- statusEntries.put(validafter, new TreeMap<String, String>());
- }
- statusEntries.get(validafter).put(fingerprint, html.toString());
- }
- rs.close();
- cs.close();
- } catch (SQLException e) {
- /* Nothing found. */
- }
-
- /* Print out what we found. */
- SimpleDateFormat validAfterUrlFormat = new SimpleDateFormat(
- "yyyy-MM-dd-HH-mm-ss");
- validAfterUrlFormat.setTimeZone(TimeZone.getTimeZone("UTC"));
- out.print("<pre><code>");
- for (long consensus : allConsensuses) {
- if (relevantConsensuses.contains(consensus)) {
- String validAfterDatetime = validAfterTimeFormat.format(
- consensus);
- String validAfterString = validAfterUrlFormat.format(consensus);
- out.print("valid-after <b>"
- + "<a href=\"consensus?valid-after="
- + validAfterString + "\" target=\"_blank\">"
- + validAfterDatetime + "</a></b>\n");
- if (statusEntries.containsKey(consensus)) {
- for (String htmlString :
- statusEntries.get(consensus).values()) {
- out.print(htmlString);
- }
- }
- out.print("\n");
- }
- }
- out.print("</code></pre>");
- if (relevantDescriptors.isEmpty()) {
- out.printf(" <p>None found!</p>\n"
- + " <p>Result is NEGATIVE with high certainty!</p>\n"
- + " <p>We did not find IP "
- + "address " + relayIP + " in any of the relay or exit lists "
- + "that were published between %s and %s.</p>\n",
- dateFormat.format(timestampTooOld),
- dateFormat.format(timestampTooNew));
- /* Run another query to find out if there are relays running on
- * other IP addresses in the same /24 or /48 network and tell the
- * user about it. */
- List<String> addressesInSameNetwork = new ArrayList<String>();
- if (!relayIP.contains(":")) {
- String[] relayIPParts = relayIP.split("\\.");
- byte[] address24Bytes = new byte[3];
- address24Bytes[0] = (byte) Integer.parseInt(relayIPParts[0]);
- address24Bytes[1] = (byte) Integer.parseInt(relayIPParts[1]);
- address24Bytes[2] = (byte) Integer.parseInt(relayIPParts[2]);
- String address24 = Hex.encodeHexString(address24Bytes);
- try {
- CallableStatement cs = conn.prepareCall(
- "{call search_addresses_in_same_24 (?, ?)}");
- cs.setString(1, address24);
- cs.setDate(2, new java.sql.Date(timestamp));
- ResultSet rs = cs.executeQuery();
- while (rs.next()) {
- String address = rs.getString(1);
- if (!addressesInSameNetwork.contains(address)) {
- addressesInSameNetwork.add(address);
- }
- }
- rs.close();
- cs.close();
- } catch (SQLException e) {
- /* No other addresses in the same /24 found. */
- }
- } else {
- StringBuilder addressHex = new StringBuilder();
- int start = relayIP.startsWith("::") ? 1 : 0;
- int end = relayIP.length() - (relayIP.endsWith("::") ? 1 : 0);
- String[] parts = relayIP.substring(start, end).split(":", -1);
- for (int i = 0; i < parts.length; i++) {
- String part = parts[i];
- if (part.length() == 0) {
- addressHex.append("x");
- } else if (part.length() <= 4) {
- addressHex.append(String.format("%4s", part));
- } else {
- addressHex = null;
- break;
- }
- }
- String address48 = null;
- if (addressHex != null) {
- String addressHexString = addressHex.toString();
- addressHexString = addressHexString.replaceFirst("x",
- String.format("%" + (33 - addressHexString.length())
- + "s", "0"));
- if (!addressHexString.contains("x") &&
- addressHexString.length() == 32) {
- address48 = addressHexString.replaceAll(" ", "0").
- toLowerCase().substring(0, 12);
- }
- }
- if (address48 != null) {
- try {
- CallableStatement cs = conn.prepareCall(
- "{call search_addresses_in_same_48 (?, ?)}");
- cs.setString(1, address48);
- cs.setDate(2, new java.sql.Date(timestamp));
- ResultSet rs = cs.executeQuery();
- while (rs.next()) {
- String address = rs.getString(1);
- if (!addressesInSameNetwork.contains(address)) {
- addressesInSameNetwork.add(address);
- }
- }
- rs.close();
- cs.close();
- } catch (SQLException e) {
- /* No other addresses in the same /48 found. */
- }
- }
- }
- if (!addressesInSameNetwork.isEmpty()) {
- if (!relayIP.contains(":")) {
- out.print(" <p>The following other IP addresses of Tor "
- + "relays in the same /24 network were found in relay "
- + "and/or exit lists around the time that could be related "
- + "to IP address " + relayIP + ":</p>\n");
- } else {
- out.print(" <p>The following other IP addresses of Tor "
- + "relays in the same /48 network were found in relay "
- + "lists around the time that could be related to IP "
- + "address " + relayIP + ":</p>\n");
- }
- out.print(" <ul>\n");
- for (String s : addressesInSameNetwork) {
- out.print(" <li>" + s + "</li>\n");
- }
- out.print(" </ul>\n");
- }
- writeFooter(out);
- try {
- conn.close();
- this.logger.info("Returned a database connection to the pool "
- + "after " + (System.currentTimeMillis()
- - requestedConnection) + " millis.");
- } catch (SQLException e) {
- }
- return;
- }
-
- /* Print out result. */
- boolean inMostRelevantConsensuses = false,
- inOtherRelevantConsensus = false,
- inTooOldConsensuses = false,
- inTooNewConsensuses = false;
- for (long match : positiveConsensusesNoTarget) {
- if (timestampIsDate &&
- dateFormat.format(match).equals(timestampStr)) {
- inMostRelevantConsensuses = true;
- } else if (!timestampIsDate &&
- match == relevantConsensuses.last()) {
- inMostRelevantConsensuses = true;
- } else if (relevantConsensuses.contains(match)) {
- inOtherRelevantConsensus = true;
- } else if (tooOldConsensuses.contains(match)) {
- inTooOldConsensuses = true;
- } else if (tooNewConsensuses.contains(match)) {
- inTooNewConsensuses = true;
- }
- }
- if (inMostRelevantConsensuses) {
- out.print(" <p>Result is POSITIVE with high certainty!"
- + "</p>\n"
- + " <p>We found one or more relays on IP address "
- + relayIP + " in ");
- if (timestampIsDate) {
- out.print("relay list published on " + timestampStr);
- } else {
- out.print("the most recent relay list preceding " + timestampStr);
- }
- out.print(" that clients were likely to know.</p>\n");
- } else {
- if (inOtherRelevantConsensus) {
- out.println(" <p>Result is POSITIVE "
- + "with moderate certainty!</p>\n");
- out.println("<p>We found one or more relays on IP address "
- + relayIP + ", but not in ");
- if (timestampIsDate) {
- out.print("a relay list published on " + timestampStr);
- } else {
- out.print("the most recent relay list preceding "
- + timestampStr);
- }
- out.print(". A possible reason for the relay being missing in a "
- + "relay list might be that some of the directory "
- + "authorities had difficulties connecting to the relay. "
- + "However, clients might still have used the relay.</p>\n");
- } else {
- out.println(" <p>Result is NEGATIVE "
- + "with high certainty!</p>\n");
- out.println(" <p>We did not find any relay on IP address "
- + relayIP
- + " in the relay lists 3 hours preceding " + timestampStr
- + ".</p>\n");
- if (inTooOldConsensuses || inTooNewConsensuses) {
- if (inTooOldConsensuses && !inTooNewConsensuses) {
- out.println(" <p>Note that we found a matching relay "
- + "in relay lists that were published between 15 and 3 "
- + "hours before " + timestampStr + ".</p>\n");
- } else if (!inTooOldConsensuses && inTooNewConsensuses) {
- out.println(" <p>Note that we found a matching relay "
- + "in relay lists that were published up to 12 hours "
- + "after " + timestampStr + ".</p>\n");
- } else {
- out.println(" <p>Note that we found a matching relay "
- + "in relay lists that were published between 15 and 3 "
- + "hours before and in relay lists that were published "
- + "up to 12 hours after " + timestampStr + ".</p>\n");
- }
- if (timestampIsDate) {
- out.println("<p>Be sure to try out the previous/next day or "
- + "provide an exact timestamp in UTC.</p>");
- } else {
- out.println("<p>Make sure that the timestamp you "
- + "provided is correctly converted to the UTC "
- + "timezone.</p>");
- }
- }
- /* We didn't find any descriptor. No need to look up targets. */
- writeFooter(out);
- try {
- conn.close();
- this.logger.info("Returned a database connection to the pool "
- + "after " + (System.currentTimeMillis()
- - requestedConnection) + " millis.");
- } catch (SQLException e) {
- }
- return;
- }
- }
-
- /* Looking up targets for IPv6 is not supported yet. */
- if (relayIP.contains(":")) {
- writeFooter(out);
- return;
- }
-
- /* Second part: target */
- out.println("<br><a name=\"exit\"></a><h3>Was this relay configured "
- + "to permit exiting to a given target?</h3>");
-
- out.println(" <form action=\"#exit\">\n"
- + " <input type=\"hidden\" name=\"timestamp\"\n"
- + " value=\"" + timestampStr + "\">\n"
- + " <input type=\"hidden\" name=\"ip\" "
- + "value=\"" + relayIP + "\">\n"
- + " <table>\n"
- + " <tr>\n"
- + " <td align=\"right\">Target address:</td>\n"
- + " <td><input type=\"text\" name=\"targetaddr\""
- + (targetIP.length() > 0 ? " value=\"" + targetIP + "\"" : "")
- + "\">"
- + (targetAddrWarning.length() > 0 ? "<br><font color=\"red\">"
- + targetAddrWarning + "</font>" : "")
- + "</td>\n"
- + " <td><i>(Ex.: 4.3.2.1)</i></td>\n"
- + " </tr>\n"
- + " <tr>\n"
- + " <td align=\"right\">Target port:</td>\n"
- + " <td><input type=\"text\" name=\"targetport\""
- + (targetPort.length() > 0 ? " value=\"" + targetPort + "\""
- : "")
- + ">"
- + (targetPortWarning.length() > 0 ? "<br><font color=\"red\">"
- + targetPortWarning + "</font>" : "")
- + "</td>\n"
- + " <td><i>(Ex.: 80)</i></td>\n"
- + " </tr>\n"
- + " <tr>\n"
- + " <td></td>\n"
- + " <td>\n"
- + " <input type=\"submit\">\n"
- + " <input type=\"reset\">\n"
- + " </td>\n"
- + " <td></td>\n"
- + " </tr>\n"
- + " </table>\n"
- + " </form>\n");
-
- if (targetIP.length() < 1) {
- writeFooter(out);
- try {
- conn.close();
- this.logger.info("Returned a database connection to the pool "
- + "after " + (System.currentTimeMillis()
- - requestedConnection) + " millis.");
- } catch (SQLException e) {
- }
- return;
- }
-
- /* Parse router descriptors to check exit policies. */
- out.println("<p>Searching the relay descriptors published by the "
- + "relay on IP address " + relayIP + " to find out whether this "
- + "relay permitted exiting to " + target + ". You may follow the "
- + "links above to the relay descriptors and grep them for the "
- + "lines printed below to confirm that results are correct.</p>");
- SortedSet<Long> positiveConsensuses = new TreeSet<Long>();
- Set<String> descriptors = relevantDescriptors.keySet();
- /* Start with all descriptors considered 'missing' and remove the
- ones we find; otherwise the indecisive-result check below could
- never trigger. */
- Set<String> missingDescriptors = new HashSet<String>(descriptors);
- for (String descriptor : descriptors) {
- byte[] rawDescriptor = null;
- try {
- String query = "SELECT rawdescriptor FROM descriptor "
- + "WHERE descriptor = '" + descriptor + "'";
- Statement statement = conn.createStatement();
- ResultSet rs = statement.executeQuery(query);
- if (rs.next()) {
- rawDescriptor = rs.getBytes(1);
- }
- rs.close();
- statement.close();
- } catch (SQLException e) {
- /* Consider this descriptor as 'missing'. */
- continue;
- }
- if (rawDescriptor != null && rawDescriptor.length > 0) {
- missingDescriptors.remove(descriptor);
- String rawDescriptorString = new String(rawDescriptor,
- "US-ASCII");
- try {
- BufferedReader br = new BufferedReader(
- new StringReader(rawDescriptorString));
- String line = null, routerLine = null, publishedLine = null;
- StringBuilder acceptRejectLines = new StringBuilder();
- boolean foundMatch = false;
- while ((line = br.readLine()) != null) {
- if (line.startsWith("router ")) {
- routerLine = line;
- } else if (line.startsWith("published ")) {
- publishedLine = line;
- } else if (line.startsWith("reject ") ||
- line.startsWith("accept ")) {
- if (foundMatch) {
- out.println(line);
- continue;
- }
- boolean ruleAccept = line.split(" ")[0].equals("accept");
- String ruleAddress = line.split(" ")[1].split(":")[0];
- if (!ruleAddress.equals("*")) {
- if (!ruleAddress.contains("/") &&
- !ruleAddress.equals(targetIP)) {
- /* IP address does not match. */
- acceptRejectLines.append(line + "\n");
- continue;
- }
- String[] ruleIPParts = ruleAddress.split("/")[0].
- split("\\.");
- int ruleNetwork = ruleAddress.contains("/") ?
- Integer.parseInt(ruleAddress.split("/")[1]) : 32;
- for (int i = 0; i < 4; i++) {
- if (ruleNetwork == 0) {
- break;
- } else if (ruleNetwork >= 8) {
- if (ruleIPParts[i].equals(targetIPParts[i])) {
- ruleNetwork -= 8;
- } else {
- break;
- }
- } else {
- int mask = 255 ^ 255 >>> ruleNetwork;
- if ((Integer.parseInt(ruleIPParts[i]) & mask) ==
- (Integer.parseInt(targetIPParts[i]) & mask)) {
- ruleNetwork = 0;
- }
- break;
- }
- }
- if (ruleNetwork > 0) {
- /* IP address does not match. */
- acceptRejectLines.append(line + "\n");
- continue;
- }
- }
- String rulePort = line.split(" ")[1].split(":")[1];
- if (targetPort.length() < 1 && !ruleAccept &&
- !rulePort.equals("*")) {
- /* With no port given, we only consider reject :* rules as
- matching. */
- acceptRejectLines.append(line + "\n");
- continue;
- }
- if (targetPort.length() > 0 && !rulePort.equals("*") &&
- rulePort.contains("-")) {
- int fromPort = Integer.parseInt(rulePort.split("-")[0]);
- int toPort = Integer.parseInt(rulePort.split("-")[1]);
- int targetPortInt = Integer.parseInt(targetPort);
- if (targetPortInt < fromPort ||
- targetPortInt > toPort) {
- /* Port not contained in interval. */
- acceptRejectLines.append(line + "\n");
- continue;
- }
- }
- if (targetPort.length() > 0) {
- if (!rulePort.equals("*") &&
- !rulePort.contains("-") &&
- !targetPort.equals(rulePort)) {
- /* Ports do not match. */
- acceptRejectLines.append(line + "\n");
- continue;
- }
- }
- boolean relevantMatch = false;
- for (long match : relevantDescriptors.get(descriptor)) {
- if (relevantConsensuses.contains(match)) {
- relevantMatch = true;
- }
- }
- if (relevantMatch) {
- String[] routerParts = routerLine.split(" ");
- out.println("<pre><code>" + routerParts[0] + " "
- + routerParts[1] + " <b>" + routerParts[2] + "</b> "
- + routerParts[3] + " " + routerParts[4] + " "
- + routerParts[5]);
- String[] publishedParts = publishedLine.split(" ");
- out.println(publishedParts[0] + " <b>"
- + publishedParts[1] + " " + publishedParts[2]
- + "</b>");
- out.print(acceptRejectLines.toString());
- out.println("<b>" + line + "</b>");
- foundMatch = true;
- }
- if (ruleAccept) {
- positiveConsensuses.addAll(
- relevantDescriptors.get(descriptor));
- }
- }
- }
- br.close();
- if (foundMatch) {
- out.println("</code></pre>");
- }
- } catch (IOException e) {
- /* Could not read descriptor string. */
- continue;
- }
- }
- }
-
- /* Print out result. */
- inMostRelevantConsensuses = false;
- inOtherRelevantConsensus = false;
- inTooOldConsensuses = false;
- inTooNewConsensuses = false;
- for (long match : positiveConsensuses) {
- if (timestampIsDate &&
- dateFormat.format(match).equals(timestampStr)) {
- inMostRelevantConsensuses = true;
- } else if (!timestampIsDate && match == relevantConsensuses.last()) {
- inMostRelevantConsensuses = true;
- } else if (relevantConsensuses.contains(match)) {
- inOtherRelevantConsensus = true;
- } else if (tooOldConsensuses.contains(match)) {
- inTooOldConsensuses = true;
- } else if (tooNewConsensuses.contains(match)) {
- inTooNewConsensuses = true;
- }
- }
- if (inMostRelevantConsensuses) {
- out.print(" <p>Result is POSITIVE with high certainty!"
- + "</p>\n"
- + " <p>We found one or more relays on IP address "
- + relayIP + " permitting exit to " + target + " in ");
- if (timestampIsDate) {
- out.print("relay list published on " + timestampStr);
- } else {
- out.print("the most recent relay list preceding " + timestampStr);
- }
- out.print(" that clients were likely to know.</p>\n");
- writeFooter(out);
- try {
- conn.close();
- this.logger.info("Returned a database connection to the pool "
- + "after " + (System.currentTimeMillis()
- - requestedConnection) + " millis.");
- } catch (SQLException e) {
- }
- return;
- }
- boolean resultIndecisive = target.length() > 0
- && !missingDescriptors.isEmpty();
- if (resultIndecisive) {
- out.println(" <p>Result is INDECISIVE!</p>\n"
- + " <p>At least one referenced descriptor could not be "
- + "found. This is a rare case, but one that (apparently) "
- + "happens. We cannot make any good statement about exit "
- + "relays without these descriptors. The following descriptors "
- + "are missing:</p>");
- for (String desc : missingDescriptors)
- out.println(" <p>" + desc + "</p>\n");
- }
- if (inOtherRelevantConsensus) {
- if (!resultIndecisive) {
- out.println(" <p>Result is POSITIVE "
- + "with moderate certainty!</p>\n");
- }
- out.println("<p>We found one or more relays on IP address "
- + relayIP + " permitting exit to " + target + ", but not in ");
- if (timestampIsDate) {
- out.print("a relay list published on " + timestampStr);
- } else {
- out.print("the most recent relay list preceding " + timestampStr);
- }
- out.print(". A possible reason for the relay being missing in a "
- + "relay list might be that some of the directory authorities "
- + "had difficulties connecting to the relay. However, clients "
- + "might still have used the relay.</p>\n");
- } else {
- if (!resultIndecisive) {
- out.println(" <p>Result is NEGATIVE "
- + "with high certainty!</p>\n");
- }
- out.println(" <p>We did not find any relay on IP address "
- + relayIP + " permitting exit to " + target
- + " in the relay list 3 hours preceding " + timestampStr
- + ".</p>\n");
- if (inTooOldConsensuses || inTooNewConsensuses) {
- if (inTooOldConsensuses && !inTooNewConsensuses) {
- out.println(" <p>Note that we found a matching relay in "
- + "relay lists that were published between 15 and 3 "
- + "hours before " + timestampStr + ".</p>\n");
- } else if (!inTooOldConsensuses && inTooNewConsensuses) {
- out.println(" <p>Note that we found a matching relay in "
- + "relay lists that were published up to 12 hours after "
- + timestampStr + ".</p>\n");
- } else {
- out.println(" <p>Note that we found a matching relay in "
- + "relay lists that were published between 15 and 3 "
- + "hours before and in relay lists that were published up "
- + "to 12 hours after " + timestampStr + ".</p>\n");
- }
- if (timestampIsDate) {
- out.println("<p>Be sure to try out the previous/next day or "
- + "provide an exact timestamp in UTC.</p>");
- } else {
- out.println("<p>Make sure that the timestamp you provided is "
- + "correctly converted to the UTC timezone.</p>");
- }
- }
- }
- if (target.length() > 0) {
- if (positiveConsensuses.isEmpty() &&
- !positiveConsensusesNoTarget.isEmpty()) {
- out.println(" <p>Note that although the found relay(s) did "
- + "not permit exiting to " + target + ", there have been one "
- + "or more relays running at the given time.</p>");
- }
- }
- try {
- conn.close();
- this.logger.info("Returned a database connection to the pool "
- + "after " + (System.currentTimeMillis()
- - requestedConnection) + " millis.");
- } catch (SQLException e) {
- }
- writeFooter(out);
- }
-}
-
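
The trickiest part of the servlet deleted above is the per-octet netmask
arithmetic in its exit-policy check (the ruleNetwork loop). A minimal,
self-contained sketch of that prefix-matching idea, for illustration
only (class and method names here are ours, not part of the repository):

    /* Sketch: does an IPv4 address fall within an exit-policy rule's
     * address/prefix? Same per-octet masking as the ruleNetwork loop. */
    public class ExitPolicyMatchSketch {
      static boolean prefixMatches(String ruleAddress, String targetIP) {
        if (ruleAddress.equals("*")) {
          return true;
        }
        String[] ruleParts = ruleAddress.split("/")[0].split("\\.");
        String[] targetParts = targetIP.split("\\.");
        int prefixLength = ruleAddress.contains("/")
            ? Integer.parseInt(ruleAddress.split("/")[1]) : 32;
        for (int i = 0; i < 4 && prefixLength > 0; i++) {
          int bits = Math.min(prefixLength, 8);
          int mask = 255 ^ 255 >>> bits;  /* high 'bits' bits of octet */
          if ((Integer.parseInt(ruleParts[i]) & mask)
              != (Integer.parseInt(targetParts[i]) & mask)) {
            return false;
          }
          prefixLength -= bits;
        }
        return true;
      }
      public static void main(String[] args) {
        System.out.println(prefixMatches("86.59.21.0/24", "86.59.21.38"));
        System.out.println(prefixMatches("86.59.21.0/24", "86.59.22.38"));
        /* prints true, then false */
      }
    }

The original loop gets the same result by subtracting 8 from ruleNetwork
for each fully matched octet and masking only the final partial octet.
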
diff --git a/src/org/torproject/ernie/web/GraphDataServlet.java b/src/org/torproject/ernie/web/GraphDataServlet.java
deleted file mode 100644
index f8c92bc..0000000
--- a/src/org/torproject/ernie/web/GraphDataServlet.java
+++ /dev/null
@@ -1,279 +0,0 @@
-/* Copyright 2011, 2012 The Tor Project
- * See LICENSE for licensing information */
-package org.torproject.ernie.web;
-
-import java.io.BufferedReader;
-import java.io.IOException;
-import java.io.StringReader;
-import java.text.ParseException;
-import java.text.SimpleDateFormat;
-import java.util.HashMap;
-import java.util.Map;
-import java.util.SortedMap;
-import java.util.SortedSet;
-import java.util.TimeZone;
-import java.util.TreeMap;
-import java.util.TreeSet;
-import java.util.logging.Logger;
-
-import javax.servlet.ServletException;
-import javax.servlet.http.HttpServlet;
-import javax.servlet.http.HttpServletRequest;
-import javax.servlet.http.HttpServletResponse;
-
-/**
- * Servlet that reads an HTTP request for a JSON-formatted graph data
- * document, asks the RObjectGenerator to generate the CSV file behind it,
- * converts it to a JSON object, and returns it to the client.
- */
-public class GraphDataServlet extends HttpServlet {
-
- private static final long serialVersionUID = 1799558498301134024L;
-
- private RObjectGenerator rObjectGenerator;
-
- /* Available graph data files. */
- private SortedMap<String, String> availableGraphDataFiles;
-
- /* Variable columns in CSV files that are in long form, not wide. */
- private SortedMap<String, String> variableColumns;
-
- /* Value columns in CSV files if only specific value columns shall be
- * included in results. */
- private SortedMap<String, String> valueColumns;
-
- private Logger logger;
-
- public void init() {
-
- /* Initialize logger. */
- this.logger = Logger.getLogger(GraphDataServlet.class.toString());
-
- /* Initialize map of available graph data files and corresponding CSV
- * files. */
- this.availableGraphDataFiles = new TreeMap<String, String>();
- this.availableGraphDataFiles.put("relays", "networksize");
- this.availableGraphDataFiles.put("bridges", "networksize");
- this.availableGraphDataFiles.put("cloudbridges", "cloudbridges");
- this.availableGraphDataFiles.put("relays-by-country",
- "relaycountries");
- this.availableGraphDataFiles.put("relays-by-flags", "relayflags");
- this.availableGraphDataFiles.put("relays-by-version", "versions");
- this.availableGraphDataFiles.put("relays-by-platform", "platforms");
- this.availableGraphDataFiles.put("relay-bandwidth", "bandwidth");
- this.availableGraphDataFiles.put("relay-dir-bandwidth", "dirbytes");
- this.availableGraphDataFiles.put("relay-bandwidth-history-by-flags",
- "bwhist-flags");
- this.availableGraphDataFiles.put("relay-bandwidth-by-flags",
- "bandwidth-flags");
- this.availableGraphDataFiles.put("direct-users-by-country",
- "direct-users");
- this.availableGraphDataFiles.put("bridge-users-by-country",
- "bridge-users");
- this.availableGraphDataFiles.put("torperf", "torperf");
-
- /* Initialize map of graphs with specific variable columns. */
- this.variableColumns = new TreeMap<String, String>();
- this.variableColumns.put("relays-by-country", "country");
- this.variableColumns.put("relay-bandwidth-history-by-flags",
- "isexit,isguard");
- this.variableColumns.put("torperf", "source");
-
- /* Initialize map of graphs with specific value columns. */
- this.valueColumns = new TreeMap<String, String>();
- this.valueColumns.put("relays", "relays");
- this.valueColumns.put("bridges", "bridges");
-
- /* Get a reference to the R object generator that we need to generate
- * CSV files. */
- this.rObjectGenerator = (RObjectGenerator) getServletContext().
- getAttribute("RObjectGenerator");
- }
-
- public void doGet(HttpServletRequest request,
- HttpServletResponse response) throws IOException,
- ServletException {
-
- /* Check if the directory listing was requested. */
- String requestURI = request.getRequestURI();
- if (requestURI.equals("/ernie/graphs/")) {
- request.setAttribute("directory", "/graphs");
- request.setAttribute("extension", "");
- request.setAttribute("files",
- this.availableGraphDataFiles.keySet());
- request.getRequestDispatcher("/WEB-INF/dir.jsp").forward(request,
- response);
- return;
- }
-
- /* Find out which JSON file was requested and make sure we know this
- * JSON file type. */
- String requestedJsonFile = request.getRequestURI();
- if (requestedJsonFile.contains("/")) {
- requestedJsonFile = requestedJsonFile.substring(requestedJsonFile.
- lastIndexOf("/") + 1);
- }
- if (!availableGraphDataFiles.containsKey(requestedJsonFile)) {
- logger.info("Did not recognize requested .csv file from request "
- + "URI: '" + request.getRequestURI() + "'. Responding with 404 "
- + "Not Found.");
- response.sendError(HttpServletResponse.SC_NOT_FOUND);
- return;
- }
- String requestedCsvFile = this.availableGraphDataFiles.get(
- requestedJsonFile);
- logger.fine("CSV file '" + requestedCsvFile + ".csv' requested.");
-
- /* Request CSV file from R object generator, which may ask Rserve to
- * generate it. */
- RObject csvFile = this.rObjectGenerator.generateCsv(requestedCsvFile,
- true);
-
- /* Make sure that we have a CSV to convert into JSON. */
- if (csvFile == null) {
- response.sendError(HttpServletResponse.SC_INTERNAL_SERVER_ERROR);
- return;
- }
-
- /* Look up if we converted this CSV to JSON format before. If not,
- * convert it now. */
- String jsonString;
- if (!this.lastConvertedCsvFile.containsKey(requestedJsonFile) ||
- this.lastConvertedCsvFile.get(requestedJsonFile) <
- csvFile.getLastModified()) {
- jsonString = this.convertCsvToJson(requestedJsonFile,
- new String(csvFile.getBytes()));
- this.lastConvertedCsvFile.put(requestedJsonFile,
- csvFile.getLastModified());
- this.convertedCsvFiles.put(requestedJsonFile, jsonString);
- } else {
- jsonString = this.convertedCsvFiles.get(requestedJsonFile);
- }
-
- /* Make sure we have a JSON string to return. */
- if (jsonString == null) {
- response.sendError(HttpServletResponse.SC_INTERNAL_SERVER_ERROR);
- return;
- }
-
- /* Write JSON string to response. */
- response.setHeader("Access-Control-Allow-Origin", "*");
- response.setContentType("application/json");
- response.setCharacterEncoding("utf-8");
- response.getWriter().print(jsonString);
- }
-
- private Map<String, Long> lastConvertedCsvFile =
- new HashMap<String, Long>();
- private Map<String, String> convertedCsvFiles =
- new HashMap<String, String>();
- private String convertCsvToJson(String requestedJsonFile,
- String csvFileContent) {
- String jsonString = null;
- try {
- BufferedReader br = new BufferedReader(new StringReader(
- csvFileContent));
- String line;
- String[] columns = null;
- int dateCol = -1;
- SortedSet<Integer> variableCols = new TreeSet<Integer>();
- SortedSet<Integer> valueCols = new TreeSet<Integer>();
- if ((line = br.readLine()) != null) {
- columns = line.split(",");
- for (int i = 0; i < columns.length; i++) {
- if (columns[i].equals("date")) {
- dateCol = i;
- } else if (this.variableColumns.containsKey(requestedJsonFile)
- && this.variableColumns.get(requestedJsonFile).contains(
- columns[i])) {
- variableCols.add(i);
- } else if (!this.valueColumns.containsKey(requestedJsonFile) ||
- this.valueColumns.get(requestedJsonFile).contains(
- columns[i])) {
- valueCols.add(i);
- }
- }
- }
- if (columns == null || dateCol < 0 || valueCols.isEmpty()) {
- return null;
- }
- SortedMap<String, SortedSet<String>> graphs =
- new TreeMap<String, SortedSet<String>>();
- while ((line = br.readLine()) != null) {
- String[] elements = line.split(",");
- if (elements.length != columns.length) {
- return null;
- }
- String date = elements[dateCol];
- String variable = "";
- if (!variableCols.isEmpty()) {
- for (int variableCol : variableCols) {
- String variableString = elements[variableCol];
- if (variableString.equals("TRUE")) {
- variable += columns[variableCol] + "_";
- } else if (variableString.equals("FALSE")) {
- variable += "not" + columns[variableCol] + "_";
- } else {
- variable += variableString + "_";
- }
- }
- }
- for (int valueCol : valueCols) {
- if (elements[valueCol].equals("NA")) {
- continue;
- }
- String graphName = variable + columns[valueCol];
- if (!graphs.containsKey(graphName)) {
- graphs.put(graphName, new TreeSet<String>());
- }
- String dateAndValue = date + "=" + elements[valueCol];
- graphs.get(graphName).add(dateAndValue);
- }
- }
- StringBuilder sb = new StringBuilder();
- SimpleDateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd");
- dateFormat.setTimeZone(TimeZone.getTimeZone("UTC"));
- for (Map.Entry<String, SortedSet<String>> e : graphs.entrySet()) {
- String graphName = e.getKey();
- SortedSet<String> datesAndValues = e.getValue();
- if (datesAndValues.isEmpty()) {
- continue;
- }
- String[] firstDateAndValue = datesAndValues.first().split("=");
- String firstDate = firstDateAndValue[0];
- String lastDate = datesAndValues.last().split("=")[0];
- sb.append(",\n\"" + graphName + "\":{"
- + "\"first\":\"" + firstDate + "\","
- + "\"last\":\"" + lastDate + "\","
- + "\"values\":[");
- int written = 0;
- String previousDate = firstDate;
- long previousDateMillis = dateFormat.parse(previousDate).
- getTime();
- for (String dateAndValue : datesAndValues) {
- String parts[] = dateAndValue.split("=");
- String date = parts[0];
- long dateMillis = dateFormat.parse(date).getTime();
- String value = parts[1];
- while (dateMillis - 86400L * 1000L > previousDateMillis) {
- sb.append((written++ > 0 ? "," : "") + "null");
- previousDateMillis += 86400L * 1000L;
- previousDate = dateFormat.format(previousDateMillis);
- }
- sb.append((written++ > 0 ? "," : "") + value);
- previousDate = date;
- previousDateMillis = dateMillis;
- }
- sb.append("]}");
- }
- br.close();
- jsonString = "[" + sb.toString().substring(1) + "\n]";
- } catch (IOException e) {
- return null;
- } catch (ParseException e) {
- return null;
- }
- return jsonString;
- }
-}
-
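
One detail of the convertCsvToJson method deleted above deserves a
callout: missing days between two data points are emitted as JSON
nulls, so clients can treat each series as contiguous daily values
starting at "first". A stripped-down sketch of just that padding step
(class name is ours):

    import java.text.SimpleDateFormat;
    import java.util.Map;
    import java.util.SortedMap;
    import java.util.TimeZone;
    import java.util.TreeMap;

    /* Sketch: pad day gaps with nulls, as convertCsvToJson does. */
    public class GapPaddingSketch {
      public static void main(String[] args) throws Exception {
        SimpleDateFormat df = new SimpleDateFormat("yyyy-MM-dd");
        df.setTimeZone(TimeZone.getTimeZone("UTC"));
        SortedMap<Long, String> values = new TreeMap<Long, String>();
        values.put(df.parse("2012-12-01").getTime(), "100");
        values.put(df.parse("2012-12-04").getTime(), "130");
        StringBuilder sb = new StringBuilder();
        long previousMillis = values.firstKey();
        int written = 0;
        for (Map.Entry<Long, String> e : values.entrySet()) {
          /* Emit one null per skipped day before this data point. */
          while (e.getKey() - 86400L * 1000L > previousMillis) {
            sb.append((written++ > 0 ? "," : "") + "null");
            previousMillis += 86400L * 1000L;
          }
          sb.append((written++ > 0 ? "," : "") + e.getValue());
          previousMillis = e.getKey();
        }
        System.out.println("[" + sb + "]");  /* [100,null,null,130] */
      }
    }
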
diff --git a/src/org/torproject/ernie/web/GraphImageServlet.java b/src/org/torproject/ernie/web/GraphImageServlet.java
deleted file mode 100644
index ea24d44..0000000
--- a/src/org/torproject/ernie/web/GraphImageServlet.java
+++ /dev/null
@@ -1,76 +0,0 @@
-/* Copyright 2011, 2012 The Tor Project
- * See LICENSE for licensing information */
-package org.torproject.ernie.web;
-
-import java.io.BufferedOutputStream;
-import java.io.IOException;
-
-import javax.servlet.ServletException;
-import javax.servlet.http.HttpServlet;
-import javax.servlet.http.HttpServletRequest;
-import javax.servlet.http.HttpServletResponse;
-
-/**
- * Servlet that reads an HTTP request for a graph image, asks the
- * RObjectGenerator to generate this graph if it's not in the cache, and
- * returns the image bytes to the client.
- */
-public class GraphImageServlet extends HttpServlet {
-
- private static final long serialVersionUID = -7356818641689744288L;
-
- private RObjectGenerator rObjectGenerator;
-
- public void init() {
-
- /* Get a reference to the R object generator that we need to generate
- * graph images. */
- this.rObjectGenerator = (RObjectGenerator) getServletContext().
- getAttribute("RObjectGenerator");
- }
-
- public void doGet(HttpServletRequest request,
- HttpServletResponse response) throws IOException,
- ServletException {
-
- /* Find out which graph type was requested and make sure we know this
- * graph type and file type. */
- String requestedGraph = request.getRequestURI();
- String fileType = null;
- if (requestedGraph.endsWith(".png") ||
- requestedGraph.endsWith(".pdf") ||
- requestedGraph.endsWith(".svg")) {
- fileType = requestedGraph.substring(requestedGraph.length() - 3);
- requestedGraph = requestedGraph.substring(0, requestedGraph.length()
- - 4);
- }
- if (requestedGraph.contains("/")) {
- requestedGraph = requestedGraph.substring(requestedGraph.
- lastIndexOf("/") + 1);
- }
-
- /* Request graph from R object generator, which either returns it from
- * its cache or asks Rserve to generate it. */
- RObject graph = rObjectGenerator.generateGraph(requestedGraph,
- fileType, request.getParameterMap(), true);
-
- /* Make sure that we have a graph to return. */
- if (graph == null || graph.getBytes() == null || fileType == null) {
- response.sendError(HttpServletResponse.SC_INTERNAL_SERVER_ERROR);
- return;
- }
-
- /* Write graph bytes to response. */
- BufferedOutputStream output = null;
- response.setContentType("image/" + fileType);
- response.setHeader("Content-Length",
- String.valueOf(graph.getBytes().length));
- response.setHeader("Content-Disposition",
- "inline; filename=\"" + graph.getFileName() + "\"");
- output = new BufferedOutputStream(response.getOutputStream(), 1024);
- output.write(graph.getBytes(), 0, graph.getBytes().length);
- output.flush();
- output.close();
- }
-}
-
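
For orientation, the servlet above derives both the graph name and the
file type from the request URI alone. A compact sketch of that parsing
(the path prefix is illustrative; actual mappings come from web.xml):

    /* Sketch: "/ernie/networksize.png" -> graph "networksize", type
     * "png", as in GraphImageServlet.doGet. Class name is ours. */
    public class GraphUriSketch {
      public static void main(String[] args) {
        String requestedGraph = "/ernie/networksize.png";
        String fileType = null;
        if (requestedGraph.endsWith(".png")
            || requestedGraph.endsWith(".pdf")
            || requestedGraph.endsWith(".svg")) {
          fileType = requestedGraph.substring(requestedGraph.length() - 3);
          requestedGraph = requestedGraph.substring(0,
              requestedGraph.length() - 4);
        }
        if (requestedGraph.contains("/")) {
          requestedGraph = requestedGraph.substring(
              requestedGraph.lastIndexOf("/") + 1);
        }
        System.out.println(requestedGraph + " / " + fileType);
        /* prints: networksize / png */
      }
    }
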
diff --git a/src/org/torproject/ernie/web/GraphParameterChecker.java b/src/org/torproject/ernie/web/GraphParameterChecker.java
deleted file mode 100644
index e93f9ea..0000000
--- a/src/org/torproject/ernie/web/GraphParameterChecker.java
+++ /dev/null
@@ -1,297 +0,0 @@
-/* Copyright 2011, 2012 The Tor Project
- * See LICENSE for licensing information */
-package org.torproject.ernie.web;
-
-import java.text.ParseException;
-import java.text.SimpleDateFormat;
-import java.util.Arrays;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-import java.util.TimeZone;
-import java.util.regex.Pattern;
-
-/**
- * Checks request parameters passed to graph-generating servlets.
- */
-public class GraphParameterChecker {
-
- /**
- * Singleton instance of this class.
- */
- private static GraphParameterChecker instance =
- new GraphParameterChecker();
-
- /**
- * Returns the singleton instance of this class.
- */
- public static GraphParameterChecker getInstance() {
- return instance;
- }
-
- /* Date format for parsing start and end dates. */
- private SimpleDateFormat dateFormat;
-
- /* Available graphs with corresponding parameter lists. */
- private Map<String, String> availableGraphs;
-
- /* Known parameters and parameter values. */
- private Map<String, String> knownParameterValues;
-
- /**
- * Initializes map with valid parameters for each of the graphs.
- */
- public GraphParameterChecker() {
- this.dateFormat = new SimpleDateFormat("yyyy-MM-dd");
- this.dateFormat.setTimeZone(TimeZone.getTimeZone("UTC"));
-
- this.knownParameterValues = new HashMap<String, String>();
- this.knownParameterValues.put("flag",
- "Running,Exit,Guard,Fast,Stable");
- this.knownParameterValues.put("granularity", "day");
- StringBuilder sb = new StringBuilder("all");
- for (String[] country : Countries.getInstance().getCountryList()) {
- sb.append("," + country[0]);
- }
- this.knownParameterValues.put("country", sb.toString());
- this.knownParameterValues.put("events", "on,off,points");
- this.knownParameterValues.put("language", "all,en,zh_CN,fa");
- this.knownParameterValues.put("source", "all,siv,moria,torperf");
- this.knownParameterValues.put("filesize", "50kb,1mb,5mb");
- }
-
- public void setAvailableGraphs(Map<String, String> availableGraphs) {
- this.availableGraphs = availableGraphs;
- }
-
- /**
- * Checks request parameters for the given graph type and returns a map
- * of recognized parameters, or null if the graph type doesn't exist or
- * the parameters are invalid.
- */
- public Map<String, String[]> checkParameters(String graphType,
- Map requestParameters) {
-
- /* Check if the graph type exists. */
- if (graphType == null ||
- !this.availableGraphs.containsKey(graphType)) {
- return null;
- }
-
- /* Find out which other parameters are supported by this graph type
- * and parse them if they are given. */
- Set<String> supportedGraphParameters = new HashSet<String>(Arrays.
- asList(this.availableGraphs.get(graphType).split(",")));
- Map<String, String[]> recognizedGraphParameters =
- new HashMap<String, String[]>();
-
- /* Parse start and end dates if supported by the graph type. If no end
- * date is provided, set it to today. If no start date is provided,
- * set it to 90 days before the end date. Make sure that start date
- * precedes end date. */
- if (supportedGraphParameters.contains("start") ||
- supportedGraphParameters.contains("end")) {
- String[] startParameter = (String[]) requestParameters.get("start");
- String[] endParameter = (String[]) requestParameters.get("end");
- long endTimestamp = System.currentTimeMillis();
- if (endParameter != null && endParameter.length > 0 &&
- endParameter[0].length() > 0) {
- try {
- endTimestamp = dateFormat.parse(endParameter[0]).getTime();
- } catch (ParseException e) {
- return null;
- }
- if (!endParameter[0].startsWith("20")) {
- return null;
- }
- }
- endParameter = new String[] { dateFormat.format(endTimestamp) };
- long startTimestamp = endTimestamp - 90L * 24L * 60L * 60L * 1000L;
- if (startParameter != null && startParameter.length > 0 &&
- startParameter[0].length() > 0) {
- try {
- startTimestamp = dateFormat.parse(startParameter[0]).getTime();
- } catch (ParseException e) {
- return null;
- }
- if (!startParameter[0].startsWith("20")) {
- return null;
- }
- }
- startParameter = new String[] { dateFormat.format(startTimestamp) };
- if (startTimestamp > endTimestamp) {
- return null;
- }
- recognizedGraphParameters.put("start", startParameter);
- recognizedGraphParameters.put("end", endParameter);
- }
-
- /* Parse relay flags if supported by the graph type. If no relay flags
- * are passed or none of them have been recognized, use the set of all
- * known flags as default. */
- if (supportedGraphParameters.contains("flag")) {
- String[] flagParameters = (String[]) requestParameters.get("flag");
- List<String> knownFlags = Arrays.asList(
- this.knownParameterValues.get("flag").split(","));
- if (flagParameters != null) {
- for (String flag : flagParameters) {
- if (flag == null || flag.length() == 0 ||
- !knownFlags.contains(flag)) {
- return null;
- }
- }
- } else {
- flagParameters = this.knownParameterValues.get("flag").split(",");
- }
- recognizedGraphParameters.put("flag", flagParameters);
- }
-
- /* Parse granularity, which can be 1 day or 1 hour, if supported by
- * the graph type. The default is 1 day. */
- if (supportedGraphParameters.contains("granularity")) {
- String[] granularityParameter = (String[]) requestParameters.get(
- "granularity");
- List<String> knownGranularities = Arrays.asList(
- this.knownParameterValues.get("granularity").split(","));
- if (granularityParameter != null) {
- if (granularityParameter.length != 1 ||
- granularityParameter[0] == null ||
- !knownGranularities.contains(granularityParameter[0])) {
- return null;
- }
- } else {
- granularityParameter = new String[] { "day" };
- }
- recognizedGraphParameters.put("granularity", granularityParameter);
- }
-
- /* Parse country codes if supported by the graph type. If no countries
- * are passed, use country code "all" (all countries) as default. */
- if (supportedGraphParameters.contains("country")) {
- String[] countryParameters = (String[]) requestParameters.get(
- "country");
- List<String> knownCountries = Arrays.asList(
- this.knownParameterValues.get("country").split(","));
- if (countryParameters != null) {
- for (String country : countryParameters) {
- if (country == null || country.length() == 0 ||
- !knownCountries.contains(country)) {
- return null;
- }
- }
- } else {
- countryParameters = new String[] { "all" };
- }
- recognizedGraphParameters.put("country", countryParameters);
- }
-
- /* Parse whether the estimated min/max range shall be displayed if
- * supported by the graph type. This parameter can be "on," "off," or
- * "points," where "off" is the default. */
- if (supportedGraphParameters.contains("events")) {
- String[] eventsParameter = (String[]) requestParameters.get(
- "events");
- List<String> knownRanges = Arrays.asList(
- this.knownParameterValues.get("events").split(","));
- if (eventsParameter != null) {
- if (eventsParameter.length != 1 ||
- eventsParameter[0].length() == 0 ||
- !knownRanges.contains(eventsParameter[0])) {
- return null;
- }
- } else {
- eventsParameter = new String[] { "off" };
- }
- recognizedGraphParameters.put("events", eventsParameter);
- }
-
- /* Parse language if supported by the graph type. Only a single
- * language can be passed. If no language is passed, use "all" as
- * default. */
- if (supportedGraphParameters.contains("language")) {
- String[] languageParameter = (String[]) requestParameters.get(
- "language");
- List<String> knownBundles = Arrays.asList(
- this.knownParameterValues.get("language").split(","));
- if (languageParameter != null) {
- if (languageParameter.length != 1 ||
- languageParameter[0].length() == 0 ||
- !knownBundles.contains(languageParameter[0])) {
- return null;
- }
- } else {
- languageParameter = new String[] { "all" };
- }
- recognizedGraphParameters.put("language", languageParameter);
- }
-
- /* Parse torperf data source if supported by the graph type. Only a
- * single source can be passed. If no source is passed, use "all" as
- * default. */
- if (supportedGraphParameters.contains("source")) {
- String[] sourceParameter = (String[]) requestParameters.get(
- "source");
- List<String> knownSources = Arrays.asList(
- this.knownParameterValues.get("source").split(","));
- if (sourceParameter != null) {
- if (sourceParameter.length != 1) {
- return null;
- }
- if (sourceParameter[0].length() == 0 ||
- !knownSources.contains(sourceParameter[0])) {
- return null;
- }
- } else {
- sourceParameter = new String[] { "all" };
- }
- recognizedGraphParameters.put("source", sourceParameter);
- }
-
- /* Parse torperf file size if supported by the graph type. Only a
- * single file size can be passed. If no file size is passed, use
- * "50kb" as default. */
- if (supportedGraphParameters.contains("filesize")) {
- String[] filesizeParameter = (String[]) requestParameters.get(
- "filesize");
- List<String> knownFilesizes = Arrays.asList(
- this.knownParameterValues.get("filesize").split(","));
- if (filesizeParameter != null) {
- if (filesizeParameter.length != 1) {
- return null;
- }
- if (filesizeParameter[0].length() == 0 ||
- !knownFilesizes.contains(filesizeParameter[0])) {
- return null;
- }
- } else {
- filesizeParameter = new String[] { "50kb" };
- }
- recognizedGraphParameters.put("filesize", filesizeParameter);
- }
-
- /* Parse fingerprint if supported/required by the graph type. Make
- * sure the fingerprint contains only hexadecimal characters and is 40
- * characters long. Fail if no fingerprint is provided! */
- if (supportedGraphParameters.contains("fingerprint")) {
- String[] fingerprintParameter = (String[]) requestParameters.get(
- "fingerprint");
- if (fingerprintParameter == null ||
- fingerprintParameter.length != 1 ||
- fingerprintParameter[0] == null ||
- !Pattern.matches("[0-9a-f]{40}",
- fingerprintParameter[0].toLowerCase())) {
- return null;
- } else {
- fingerprintParameter[0] = fingerprintParameter[0].toLowerCase();
- recognizedGraphParameters.put("fingerprint",
- fingerprintParameter);
- }
- }
-
- /* We now have a map with all required graph parameters. Return it. */
- return recognizedGraphParameters;
- }
-}
-
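
The subtlest behavior in the checker above is the start/end handling:
a missing end date defaults to today (UTC), a missing start date to 90
days before the end, any supplied date must parse and begin with "20",
and start must not follow end. A condensed, self-contained sketch of
just those rules (names are ours):

    import java.text.ParseException;
    import java.text.SimpleDateFormat;
    import java.util.TimeZone;

    /* Sketch of the start/end defaulting and validation; returns null
     * on invalid input, as checkParameters does. Names are ours. */
    public class DateRangeSketch {
      static String[] checkRange(String start, String end) {
        SimpleDateFormat df = new SimpleDateFormat("yyyy-MM-dd");
        df.setTimeZone(TimeZone.getTimeZone("UTC"));
        try {
          long endMillis = System.currentTimeMillis();
          if (end != null && end.length() > 0) {
            if (!end.startsWith("20")) {
              return null;
            }
            endMillis = df.parse(end).getTime();
          }
          long startMillis = endMillis - 90L * 24L * 60L * 60L * 1000L;
          if (start != null && start.length() > 0) {
            if (!start.startsWith("20")) {
              return null;
            }
            startMillis = df.parse(start).getTime();
          }
          return startMillis > endMillis ? null
              : new String[] { df.format(startMillis),
                  df.format(endMillis) };
        } catch (ParseException e) {
          return null;
        }
      }
      public static void main(String[] args) {
        String[] range = checkRange(null, "2012-12-14");
        System.out.println(range[0] + " .. " + range[1]);
        /* prints: 2012-09-15 .. 2012-12-14 */
      }
    }
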
diff --git a/src/org/torproject/ernie/web/GraphsSubpagesServlet.java b/src/org/torproject/ernie/web/GraphsSubpagesServlet.java
deleted file mode 100644
index b7172f2..0000000
--- a/src/org/torproject/ernie/web/GraphsSubpagesServlet.java
+++ /dev/null
@@ -1,163 +0,0 @@
-/* Copyright 2011, 2012 The Tor Project
- * See LICENSE for licensing information */
-package org.torproject.ernie.web;
-
-import java.io.IOException;
-import java.text.SimpleDateFormat;
-import java.util.Arrays;
-import java.util.Date;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-import java.util.TimeZone;
-
-import javax.servlet.ServletException;
-import javax.servlet.http.HttpServlet;
-import javax.servlet.http.HttpServletRequest;
-import javax.servlet.http.HttpServletResponse;
-
-public class GraphsSubpagesServlet extends HttpServlet {
-
- private static final long serialVersionUID = -5959829347747628403L;
-
- /* Available graphs subpages with corresponding JSP to which requests
- * are forwarded. */
- private Map<String, String> availableGraphsSubpages;
-
- /* Available tables on graphs subpages. */
- private Map<String, Set<String>> availableGraphsSubpageTables;
-
- /* Country codes and names for per-country graphs. */
- private List<String[]> knownCountries;
-
- /* R object generator for generating table data. */
- private RObjectGenerator rObjectGenerator;
-
- public GraphsSubpagesServlet() {
- this.availableGraphsSubpages = new HashMap<String, String>();
- this.availableGraphsSubpages.put("network.html",
- "WEB-INF/network.jsp");
- this.availableGraphsSubpages.put("fast-exits.html",
- "WEB-INF/fast-exits.jsp");
- this.availableGraphsSubpages.put("users.html", "WEB-INF/users.jsp");
- this.availableGraphsSubpages.put("packages.html",
- "WEB-INF/packages.jsp");
- this.availableGraphsSubpages.put("performance.html",
- "WEB-INF/performance.jsp");
-
- this.availableGraphsSubpageTables =
- new HashMap<String, Set<String>>();
- this.availableGraphsSubpageTables.put("users.html",
- new HashSet<String>(Arrays.asList(
- "direct-users,censorship-events".split(","))));
-
- this.knownCountries = Countries.getInstance().getCountryList();
- }
-
- public void init() {
- /* Get a reference to the R object generator that we need to generate
- * table data. */
- this.rObjectGenerator = (RObjectGenerator) getServletContext().
- getAttribute("RObjectGenerator");
- }
-
- public void doGet(HttpServletRequest request,
- HttpServletResponse response) throws IOException, ServletException {
-
- /* Find out which graph subpage was requested and look up which JSP
- * handles this subpage. */
- String requestedPage = request.getRequestURI();
- if (requestedPage == null) {
- response.sendError(HttpServletResponse.SC_BAD_REQUEST);
- return;
- }
- if (requestedPage.contains("/")) {
- requestedPage = requestedPage.substring(requestedPage.
- lastIndexOf("/") + 1);
- }
- if (!availableGraphsSubpages.containsKey(requestedPage)) {
- response.sendError(HttpServletResponse.SC_NOT_FOUND);
- return;
- }
- String jsp = availableGraphsSubpages.get(requestedPage);
-
- /* Find out which graph or table type was requested, if any. */
- String requestedGraph = request.getParameter("graph");
- String requestedTable = request.getParameter("table");
- if (requestedGraph != null) {
-
- /* Check if the passed parameters are valid. */
- Map<String, String[]> checkedParameters = GraphParameterChecker.
- getInstance().checkParameters(requestedGraph,
- request.getParameterMap());
- if (checkedParameters != null) {
-
- /* Set the graph's attributes to the appropriate values, so that
- * we can display the correct graph and prepopulate the form. */
- StringBuilder urlBuilder = new StringBuilder();
- for (Map.Entry<String, String[]> param :
- checkedParameters.entrySet()) {
- request.setAttribute(requestedGraph.replaceAll("-", "_") + "_"
- + param.getKey(), param.getValue());
- for (String paramValue : param.getValue()) {
- urlBuilder.append("&" + param.getKey() + "=" + paramValue);
- }
- }
- String url = "?" + urlBuilder.toString().substring(1);
- request.setAttribute(requestedGraph.replaceAll("-", "_") + "_url",
- url);
- }
- }
- if (requestedTable != null) {
-
- /* Check if the passed parameters are valid. */
- Map<String, String[]> checkedParameters = TableParameterChecker.
- getInstance().checkParameters(requestedTable,
- request.getParameterMap());
- if (checkedParameters != null) {
-
- /* Set the table's attributes to the appropriate values, so that
- * we can prepopulate the form. */
- for (Map.Entry<String, String[]> param :
- checkedParameters.entrySet()) {
- request.setAttribute(requestedTable.replaceAll("-", "_") + "_"
- + param.getKey(), param.getValue());
- }
- }
- }
-
- /* Generate table data if the graphs subpage has any tables,
- * regardless of whether a table update was requested, and add the
- * table data as request attribute. */
- if (this.availableGraphsSubpageTables.containsKey(requestedPage)) {
- for (String tableName :
- this.availableGraphsSubpageTables.get(requestedPage)) {
- List<Map<String, String>> tableData = rObjectGenerator.
- generateTable(tableName, requestedTable,
- request.getParameterMap(), true);
- request.setAttribute(tableName.replaceAll("-", "_")
- + "_tabledata", tableData);
- }
- }
-
- /* Pass list of known countries in case we want to display them. */
- request.setAttribute("countries", this.knownCountries);
-
- /* Pass the default start and end dates. */
- SimpleDateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd");
- dateFormat.setTimeZone(TimeZone.getTimeZone("UTC"));
- Date defaultEndDate = new Date();
- Date defaultStartDate = new Date(defaultEndDate.getTime()
- - 90L * 24L * 60L * 60L * 1000L);
- request.setAttribute("default_start_date",
- dateFormat.format(defaultStartDate));
- request.setAttribute("default_end_date",
- dateFormat.format(defaultEndDate));
-
- /* Forward the request to the JSP that does all the hard work. */
- request.getRequestDispatcher(jsp).forward(request, response);
- }
-}
-
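
One convention worth spelling out from the servlet above: checked
parameters are exposed to the JSPs as request attributes named after
the graph, with dashes turned into underscores, plus a rebuilt query
string under the _url suffix. A tiny sketch of that mangling (values
are illustrative):

    /* Sketch: graph "direct-users" with start=2012-09-15 yields
     * attributes "direct_users_start" and "direct_users_url". */
    public class AttributeNamingSketch {
      public static void main(String[] args) {
        String requestedGraph = "direct-users";
        String key = "start", value = "2012-09-15";
        String prefix = requestedGraph.replaceAll("-", "_");
        System.out.println(prefix + "_" + key + " = " + value);
        System.out.println(prefix + "_url = ?" + key + "=" + value);
      }
    }
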
diff --git a/src/org/torproject/ernie/web/RObject.java b/src/org/torproject/ernie/web/RObject.java
deleted file mode 100644
index 25dac52..0000000
--- a/src/org/torproject/ernie/web/RObject.java
+++ /dev/null
@@ -1,23 +0,0 @@
-/* Copyright 2011, 2012 The Tor Project
- * See LICENSE for licensing information */
-package org.torproject.ernie.web;
-
-public class RObject {
- private byte[] bytes;
- private String fileName;
- private long lastModified;
- public RObject(byte[] bytes, String fileName, long lastModified) {
- this.bytes = bytes;
- this.fileName = fileName;
- this.lastModified = lastModified;
- }
- public String getFileName() {
- return this.fileName;
- }
- public byte[] getBytes() {
- return this.bytes;
- }
- public long getLastModified() {
- return this.lastModified;
- }
-}
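
RObject, deleted above, is the small value type both graph servlets
rely on; its lastModified field is what makes the caching work. A
short usage sketch of the freshness check as GraphDataServlet performs
it (placed in the same package as RObject so no import is needed; the
other names are ours):

    /* Sketch: only re-convert a cached CSV if the RObject behind it
     * is newer than the last conversion. */
    public class FreshnessSketch {
      public static void main(String[] args) {
        long lastConverted = 0L;  /* e.g., from a Map<String, Long> */
        RObject csv = new RObject("date,relays\n".getBytes(),
            "networksize.csv", System.currentTimeMillis());
        if (csv.getLastModified() > lastConverted) {
          System.out.println("re-convert " + csv.getFileName());
        }
      }
    }
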
diff --git a/src/org/torproject/ernie/web/RObjectGenerator.java b/src/org/torproject/ernie/web/RObjectGenerator.java
deleted file mode 100644
index 086d321..0000000
--- a/src/org/torproject/ernie/web/RObjectGenerator.java
+++ /dev/null
@@ -1,394 +0,0 @@
-/* Copyright 2011, 2012 The Tor Project
- * See LICENSE for licensing information */
-package org.torproject.ernie.web;
-
-import java.io.BufferedInputStream;
-import java.io.BufferedReader;
-import java.io.ByteArrayInputStream;
-import java.io.ByteArrayOutputStream;
-import java.io.File;
-import java.io.FileInputStream;
-import java.io.IOException;
-import java.io.InputStreamReader;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-import java.util.SortedSet;
-import java.util.TreeSet;
-
-import javax.servlet.ServletContext;
-import javax.servlet.ServletContextEvent;
-import javax.servlet.ServletContextListener;
-
-import org.rosuda.REngine.Rserve.RConnection;
-import org.rosuda.REngine.Rserve.RserveException;
-
-public class RObjectGenerator implements ServletContextListener {
-
- /* Host and port where Rserve is listening. */
- private String rserveHost;
- private int rservePort;
-
- /* Some parameters for our cache of graph images. */
- private String cachedGraphsDirectory;
- private long maxCacheAge;
-
- private SortedSet<String> availableCsvFiles;
- private Map<String, String> availableTables;
- private Map<String, String> availableGraphs;
- private Set<String> availableGraphFileTypes;
-
- public void contextInitialized(ServletContextEvent event) {
-
- /* Initialize using context parameters. */
- ServletContext servletContext = event.getServletContext();
- this.rserveHost = servletContext.getInitParameter("rserveHost");
- this.rservePort = Integer.parseInt(servletContext.getInitParameter(
- "rservePort"));
- this.maxCacheAge = Long.parseLong(servletContext.getInitParameter(
- "maxCacheAge"));
- this.cachedGraphsDirectory = servletContext.getInitParameter(
- "cachedGraphsDir");
-
- /* Initialize map of available CSV files. */
- this.availableCsvFiles = new TreeSet<String>();
- this.availableCsvFiles.add("bandwidth");
- this.availableCsvFiles.add("bandwidth-flags");
- this.availableCsvFiles.add("bridge-users");
- this.availableCsvFiles.add("bwhist-flags");
- this.availableCsvFiles.add("connbidirect");
- this.availableCsvFiles.add("cloudbridges");
- this.availableCsvFiles.add("direct-users");
- this.availableCsvFiles.add("dirreq-stats");
- this.availableCsvFiles.add("dirbytes");
- this.availableCsvFiles.add("monthly-users-average");
- this.availableCsvFiles.add("monthly-users-peak");
- this.availableCsvFiles.add("networksize");
- this.availableCsvFiles.add("platforms");
- this.availableCsvFiles.add("relaycountries");
- this.availableCsvFiles.add("relayflags");
- this.availableCsvFiles.add("torperf");
- this.availableCsvFiles.add("torperf-failures");
- this.availableCsvFiles.add("versions");
-
- this.availableTables = new HashMap<String, String>();
- this.availableTables.put("direct-users", "start,end,filename");
- this.availableTables.put("censorship-events", "start,end,filename");
- TableParameterChecker.getInstance().setAvailableTables(
- availableTables);
-
- this.availableGraphs = new HashMap<String, String>();
- this.availableGraphs.put("networksize", "start,end,filename");
- this.availableGraphs.put("cloudbridges", "start,end,filename");
- this.availableGraphs.put("relaycountries",
- "start,end,country,filename");
- this.availableGraphs.put("relayflags", "start,end,flag,granularity,"
- + "filename");
- this.availableGraphs.put("versions", "start,end,filename");
- this.availableGraphs.put("platforms", "start,end,filename");
- this.availableGraphs.put("bandwidth", "start,end,filename");
- this.availableGraphs.put("bandwidth-flags", "start,end,filename");
- this.availableGraphs.put("bwhist-flags", "start,end,filename");
- this.availableGraphs.put("dirbytes", "start,end,filename");
- this.availableGraphs.put("direct-users",
- "start,end,country,events,filename");
- this.availableGraphs.put("bridge-users",
- "start,end,country,filename");
- this.availableGraphs.put("torperf",
- "start,end,source,filesize,filename");
- this.availableGraphs.put("torperf-failures",
- "start,end,source,filesize,filename");
- this.availableGraphs.put("connbidirect", "start,end,filename");
- this.availableGraphs.put("fast-exits", "start,end,filename");
- this.availableGraphs.put("almost-fast-exits", "start,end,filename");
- this.availableGraphFileTypes = new HashSet<String>(Arrays.asList(
- "png,pdf,svg".split(",")));
- GraphParameterChecker.getInstance().setAvailableGraphs(
- availableGraphs);
-
-    /* Register ourselves, so that servlets can use us. */
- servletContext.setAttribute("RObjectGenerator", this);
-
- /* Periodically generate R objects with default parameters. */
- new Thread() {
- public void run() {
- long lastUpdated = 0L, sleep;
- while (true) {
- while ((sleep = maxCacheAge * 1000L / 2L + lastUpdated
- - System.currentTimeMillis()) > 0L) {
- try {
- Thread.sleep(sleep);
- } catch (InterruptedException e) {
- }
- }
- for (String csvFile : availableCsvFiles) {
- generateCsv(csvFile, false);
- }
- for (String tableName : availableTables.keySet()) {
- generateTable(tableName, tableName, new HashMap(), false);
- }
- for (String graphName : availableGraphs.keySet()) {
- for (String fileType : availableGraphFileTypes) {
- generateGraph(graphName, fileType, new HashMap(), false);
- }
- }
- lastUpdated = System.currentTimeMillis();
- }
- };
- }.start();
- }
-
- public void contextDestroyed(ServletContextEvent event) {
- /* Nothing to do. */
- }
-
- public RObject generateGraph(String requestedGraph, String fileType,
- Map parameterMap, boolean checkCache) {
- Map<String, String[]> checkedParameters = GraphParameterChecker.
- getInstance().checkParameters(requestedGraph, parameterMap);
- if (checkedParameters == null) {
- /* TODO We're going to take the blame by sending an internal server
- * error to the client, but really the user is to blame. */
- return null;
- }
- StringBuilder rQueryBuilder = new StringBuilder("plot_"
- + requestedGraph.replaceAll("-", "_") + "("),
- imageFilenameBuilder = new StringBuilder(requestedGraph);
- for (Map.Entry<String, String[]> parameter :
- checkedParameters.entrySet()) {
- String parameterName = parameter.getKey();
- String[] parameterValues = parameter.getValue();
- for (String param : parameterValues) {
- imageFilenameBuilder.append("-" + param);
- }
- if (parameterValues.length < 2) {
- rQueryBuilder.append(parameterName + " = '" + parameterValues[0]
- + "', ");
- } else {
- rQueryBuilder.append(parameterName + " = c(");
- for (int i = 0; i < parameterValues.length - 1; i++) {
- rQueryBuilder.append("'" + parameterValues[i] + "', ");
- }
- rQueryBuilder.append("'" + parameterValues[
- parameterValues.length - 1] + "'), ");
- }
- }
- imageFilenameBuilder.append("." + fileType);
- String imageFilename = imageFilenameBuilder.toString();
- rQueryBuilder.append("path = '%s')");
- String rQuery = rQueryBuilder.toString();
- File imageFile = new File(this.cachedGraphsDirectory + "/"
- + imageFilename);
- return this.generateRObject(rQuery, imageFile, imageFilename,
- checkCache);
- }
-
- public SortedSet<String> getAvailableCsvFiles() {
- return this.availableCsvFiles;
- }
-
- public RObject generateCsv(String requestedCsvFile,
- boolean checkCache) {
- /* Prepare filename and R query string. */
- String rQuery = "export_" + requestedCsvFile.replaceAll("-", "_")
- + "(path = '%s')";
- String csvFilename = requestedCsvFile + ".csv";
-
- /* See if we need to generate this .csv file. */
- File csvFile = new File(this.cachedGraphsDirectory + "/"
- + csvFilename);
- return this.generateRObject(rQuery, csvFile, csvFilename, checkCache);
- }
-
- public List<Map<String, String>> generateTable(String tableName,
- String requestedTable, Map parameterMap, boolean checkCache) {
-
- Map<String, String[]> checkedParameters = null;
- if (tableName.equals(requestedTable)) {
- checkedParameters = TableParameterChecker.
- getInstance().checkParameters(requestedTable,
- parameterMap);
- } else {
- checkedParameters = TableParameterChecker.
- getInstance().checkParameters(tableName, null);
- }
- if (checkedParameters == null) {
- /* TODO We're going to take the blame by sending an internal server
- * error to the client, but really the user is to blame. */
- return null;
- }
- StringBuilder rQueryBuilder = new StringBuilder("write_"
- + tableName.replaceAll("-", "_") + "("),
- tableFilenameBuilder = new StringBuilder(tableName);
-
- for (Map.Entry<String, String[]> parameter :
- checkedParameters.entrySet()) {
- String parameterName = parameter.getKey();
- String[] parameterValues = parameter.getValue();
- for (String param : parameterValues) {
- tableFilenameBuilder.append("-" + param);
- }
- if (parameterValues.length < 2) {
- rQueryBuilder.append(parameterName + " = '"
- + parameterValues[0] + "', ");
- } else {
- rQueryBuilder.append(parameterName + " = c(");
- for (int i = 0; i < parameterValues.length - 1; i++) {
- rQueryBuilder.append("'" + parameterValues[i] + "', ");
- }
- rQueryBuilder.append("'" + parameterValues[
- parameterValues.length - 1] + "'), ");
- }
- }
- tableFilenameBuilder.append(".tbl");
- String tableFilename = tableFilenameBuilder.toString();
- rQueryBuilder.append("path = '%s')");
- String rQuery = rQueryBuilder.toString();
- return this.generateTable(rQuery, tableFilename, checkCache);
- }
-
-  /* Generate table data using the given R query and filename, or read
-   * previously generated table data from disk if it's not too old, and
-   * return the table data. */
- private List<Map<String, String>> generateTable(String rQuery,
- String tableFilename, boolean checkCache) {
-
- /* See if we need to generate this table. */
- File tableFile = new File(this.cachedGraphsDirectory + "/"
- + tableFilename);
- byte[] tableBytes = this.generateRObject(rQuery, tableFile,
- tableFilename, checkCache).getBytes();
-
- /* Write the table content to a map. */
- List<Map<String, String>> result = null;
- try {
- result = new ArrayList<Map<String, String>>();
- BufferedReader br = new BufferedReader(new InputStreamReader(
- new ByteArrayInputStream(tableBytes)));
- String line = br.readLine();
- if (line != null) {
- List<String> headers = new ArrayList<String>(Arrays.asList(
- line.split(",")));
- while ((line = br.readLine()) != null) {
- String[] parts = line.split(",");
- if (headers.size() != parts.length) {
- return null;
- }
- Map<String, String> row = new HashMap<String, String>();
- for (int i = 0; i < headers.size(); i++) {
- row.put(headers.get(i), parts[i]);
- }
- result.add(row);
- }
- }
- } catch (IOException e) {
- return null;
- }
-
- /* Return table values. */
- return result;
- }
-
- /* Generate an R object in a separate worker thread, or wait for an
- * already running worker thread to finish and get its result. */
- private RObject generateRObject(String rQuery, File rObjectFile,
- String fileName, boolean checkCache) {
- RObjectGeneratorWorker worker = null;
- synchronized (this.rObjectGeneratorThreads) {
- if (this.rObjectGeneratorThreads.containsKey(rQuery)) {
- worker = this.rObjectGeneratorThreads.get(rQuery);
- } else {
- worker = new RObjectGeneratorWorker(rQuery, rObjectFile,
- fileName, checkCache);
- this.rObjectGeneratorThreads.put(rQuery, worker);
- worker.start();
- }
- }
- try {
- worker.join();
- } catch (InterruptedException e) {
- }
- synchronized (this.rObjectGeneratorThreads) {
- if (this.rObjectGeneratorThreads.containsKey(rQuery) &&
- this.rObjectGeneratorThreads.get(rQuery) == worker) {
- this.rObjectGeneratorThreads.remove(rQuery);
- }
- }
- return worker.getRObject();
- }
-
- private Map<String, RObjectGeneratorWorker> rObjectGeneratorThreads =
- new HashMap<String, RObjectGeneratorWorker>();
-
- private class RObjectGeneratorWorker extends Thread {
-
- private String rQuery;
- private File rObjectFile;
- private String fileName;
- private boolean checkCache;
- private RObject result = null;
-
- public RObjectGeneratorWorker(String rQuery, File rObjectFile,
- String fileName, boolean checkCache) {
- this.rQuery = rQuery;
- this.rObjectFile = rObjectFile;
- this.fileName = fileName;
- this.checkCache = checkCache;
- }
-
- public void run() {
-
- /* See if we need to generate this R object. */
- long now = System.currentTimeMillis();
- if (!this.checkCache || !this.rObjectFile.exists() ||
- this.rObjectFile.lastModified() < now - maxCacheAge * 1000L) {
-
- /* We do. Update the R query to contain the absolute path to the
- * file to be generated, create a connection to Rserve, run the R
- * query, and close the connection. The generated object will be
- * on disk. */
- this.rQuery = String.format(this.rQuery,
- this.rObjectFile.getAbsolutePath());
- try {
- RConnection rc = new RConnection(rserveHost, rservePort);
- rc.eval(this.rQuery);
- rc.close();
- } catch (RserveException e) {
- return;
- }
-
- /* Check that we really just generated the R object. */
- if (!this.rObjectFile.exists() || this.rObjectFile.lastModified()
- < now - maxCacheAge * 1000L) {
- return;
- }
- }
-
- /* Read the R object from disk and write it to a byte array. */
- long lastModified = this.rObjectFile.lastModified();
- try {
- BufferedInputStream bis = new BufferedInputStream(
- new FileInputStream(this.rObjectFile), 1024);
- ByteArrayOutputStream baos = new ByteArrayOutputStream();
- byte[] buffer = new byte[1024];
- int length;
- while ((length = bis.read(buffer)) > 0) {
- baos.write(buffer, 0, length);
- }
- bis.close();
- this.result = new RObject(baos.toByteArray(), this.fileName,
- lastModified);
- } catch (IOException e) {
- return;
- }
- }
-
- public RObject getRObject() {
- return this.result;
- }
- }
-}
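generateRObject() above implements a single-flight pattern: the first caller for a given R query starts a worker thread, concurrent callers for the same query join that worker, and the map entry is removed only if it still points at the finished worker. A stripped-down sketch of the same pattern, with the Rserve evaluation replaced by a stand-in (all names illustrative):

import java.util.HashMap;
import java.util.Map;

public class SingleFlight {
  private final Map<String, Worker> running =
      new HashMap<String, Worker>();

  public byte[] compute(String key) throws InterruptedException {
    Worker worker;
    synchronized (this.running) {
      worker = this.running.get(key);
      if (worker == null) {
        worker = new Worker(key);
        this.running.put(key, worker);
        worker.start();
      }
    }
    worker.join();
    synchronized (this.running) {
      /* Remove the entry only if it is still our worker. */
      if (this.running.get(key) == worker) {
        this.running.remove(key);
      }
    }
    return worker.result;
  }

  private static class Worker extends Thread {
    private final String key;
    private volatile byte[] result;
    Worker(String key) { this.key = key; }
    public void run() {
      /* Stand-in for the expensive Rserve evaluation. */
      this.result = this.key.getBytes();
    }
  }
}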
diff --git a/src/org/torproject/ernie/web/RelaySearchServlet.java b/src/org/torproject/ernie/web/RelaySearchServlet.java
deleted file mode 100644
index f361833..0000000
--- a/src/org/torproject/ernie/web/RelaySearchServlet.java
+++ /dev/null
@@ -1,505 +0,0 @@
-/* Copyright 2011, 2012 The Tor Project
- * See LICENSE for licensing information */
-package org.torproject.ernie.web;
-
-import java.io.IOException;
-import java.io.UnsupportedEncodingException;
-import java.math.BigInteger;
-import java.sql.Connection;
-import java.sql.ResultSet;
-import java.sql.SQLException;
-import java.sql.Statement;
-import java.text.ParseException;
-import java.text.SimpleDateFormat;
-import java.util.ArrayList;
-import java.util.Calendar;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.SortedMap;
-import java.util.SortedSet;
-import java.util.TimeZone;
-import java.util.TreeMap;
-import java.util.TreeSet;
-import java.util.logging.Level;
-import java.util.logging.Logger;
-import java.util.regex.Pattern;
-
-import javax.naming.Context;
-import javax.naming.InitialContext;
-import javax.naming.NamingException;
-import javax.servlet.ServletException;
-import javax.servlet.http.HttpServlet;
-import javax.servlet.http.HttpServletRequest;
-import javax.servlet.http.HttpServletResponse;
-import javax.sql.DataSource;
-
-import org.apache.commons.codec.binary.Base64;
-
-/**
- * Web page that allows users to search for relays in the descriptor
- * archives.
- *
- * Possible search terms for testing:
- * - gabelmoo
- * - gabelmoo 2010-09
- * - gabelmoo 2010-09-18
- * - gabelmoo $F2044413DAC2E02E3D6BCF4735A19BCA1DE97281
- * - gabelmoo 80.190.246
- * - gabelmoo $F2044413DAC2E02E3D6BCF4735A19BCA1DE97281 80.190.246
- * - 5898549205 dc737cc9dca16af6 79.212.74.45
- * - 5898549205 dc737cc9dca16af6
- * - 80.190.246.100
- * - $F2044413DAC2E02E3D6BCF4735A19BCA1DE97281
- * - $F2044413DAC2E02E3D6BCF4735A19BCA1DE97281 80.190.246
- * - 58985492
- * - 58985492 79.212.74.45
- */
-public class RelaySearchServlet extends HttpServlet {
-
- private static final long serialVersionUID = -1772662230310611805L;
-
- private Pattern alphaNumDotDashDollarSpacePattern =
- Pattern.compile("[A-Za-z0-9\\.\\-$ ]+");
-
- private Pattern numPattern = Pattern.compile("[0-9]+");
-
- private Pattern hexPattern = Pattern.compile("[A-Fa-f0-9]+");
-
- private Pattern alphaNumPattern = Pattern.compile("[A-Za-z0-9]+");
-
- private SimpleDateFormat dayFormat = new SimpleDateFormat("yyyy-MM-dd");
-
- private SimpleDateFormat monthFormat = new SimpleDateFormat("yyyy-MM");
-
- private SimpleDateFormat dateTimeFormat =
- new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
-
- private long minValidAfterMillis;
-
- private DataSource ds;
-
- private Logger logger;
-
- public void init() {
-
- /* Initialize logger. */
- this.logger = Logger.getLogger(RelaySearchServlet.class.toString());
-
- /* Initialize date format parsers. */
- dayFormat.setTimeZone(TimeZone.getTimeZone("UTC"));
- monthFormat.setTimeZone(TimeZone.getTimeZone("UTC"));
- dateTimeFormat.setTimeZone(TimeZone.getTimeZone("UTC"));
-
- /* Look up data source. */
- try {
- Context cxt = new InitialContext();
- this.ds = (DataSource) cxt.lookup("java:comp/env/jdbc/tordir");
- this.logger.info("Successfully looked up data source.");
- } catch (NamingException e) {
- this.logger.log(Level.WARNING, "Could not look up data source", e);
- }
-
- /* Look up first consensus in the database. */
- try {
- long requestedConnection = System.currentTimeMillis();
- Connection conn = this.ds.getConnection();
- String query = "SELECT MIN(validafter) AS first FROM consensus";
- Statement statement = conn.createStatement();
- ResultSet rs = statement.executeQuery(query);
- if (rs.next()) {
- this.minValidAfterMillis = rs.getTimestamp(1).getTime();
- }
- rs.close();
- statement.close();
- conn.close();
- this.logger.info("Returned a database connection to the pool "
- + "after " + (System.currentTimeMillis()
- - requestedConnection) + " millis.");
- } catch (SQLException e) {
- this.logger.log(Level.WARNING, "Could not look up first consensus "
- + "valid-after time in the database.", e);
- }
- }
-
- public void doGet(HttpServletRequest request,
- HttpServletResponse response) throws IOException,
- ServletException {
-
- /* Read search parameter. If we don't have a search parameter, we're
- * done here. */
- String searchParameter = request.getParameter("search");
- if (searchParameter == null || searchParameter.length() == 0) {
- request.getRequestDispatcher("WEB-INF/relay-search.jsp").forward(
- request, response);
- return;
- }
-
- /* Parse search parameter to identify what nickname, fingerprint,
- * and/or IP address to search for. A valid query contains no more
- * than one identifier for each of the fields. As a special case,
- * there are search terms consisting of 8 to 19 hex characters that
- * can be either a nickname or a fingerprint. */
- String searchNickname = "";
- String searchFingerprint = "";
- String searchIPAddress = "";
- SortedSet<String> searchDays = new TreeSet<String>();
- SortedSet<String> searchMonths = new TreeSet<String>();
- SortedSet<Long> searchDayTimestamps = new TreeSet<Long>();
- SortedSet<Long> searchMonthTimestamps = new TreeSet<Long>();
- boolean validQuery = false;
-
-    /* Only parse the search parameter if it contains nothing other
-     * than alphanumeric characters, dots, hyphens, dollar signs, and
-     * spaces. */
- if (alphaNumDotDashDollarSpacePattern.matcher(searchParameter).
- matches()) {
- SortedSet<String> searchTerms = new TreeSet<String>();
- if (searchParameter.trim().contains(" ")) {
- String[] split = searchParameter.trim().split(" ");
- for (int i = 0; i < split.length; i++) {
- if (split[i].length() > 0) {
- searchTerms.add(split[i]);
- }
- }
- } else {
- searchTerms.add(searchParameter.trim());
- }
-
- /* Parse each search term separately. */
- for (String searchTerm : searchTerms) {
-
- /* If the search term contains a dot, it can only be an IP
- * address. */
- if (searchTerm.contains(".") && !searchTerm.startsWith(".")) {
- String[] octets = searchTerm.split("\\.");
- if (searchIPAddress.length() > 0 || octets.length < 2 ||
- octets.length > 4) {
- validQuery = false;
- break;
- }
- boolean invalidOctet = false;
- StringBuilder sb = new StringBuilder();
- for (int i = 0; i < octets.length; i++) {
- if (!numPattern.matcher(octets[i]).matches() ||
- octets[i].length() > 3 ||
- Integer.parseInt(octets[i]) > 255) {
- invalidOctet = true;
- break;
- } else {
- sb.append("." + Integer.parseInt(octets[i]));
- }
- }
- if (invalidOctet) {
- validQuery = false;
- break;
- }
- if (octets.length < 4) {
- sb.append(".");
- }
- searchIPAddress = sb.toString().substring(1);
- validQuery = true;
- }
-
-        /* If the search term starts with "20" and contains hyphens,
-         * it must be a month or a day. */
- else if (searchTerm.contains("-") &&
- searchTerm.startsWith("20")) {
- try {
- if (searchTerm.length() == 10) {
- searchDayTimestamps.add(dayFormat.parse(searchTerm).
- getTime());
- searchDays.add(searchTerm);
- } else if (searchTerm.length() == 7) {
- searchMonthTimestamps.add(monthFormat.parse(searchTerm).
- getTime());
- searchMonths.add(searchTerm);
- } else {
- validQuery = false;
- break;
- }
- } catch (ParseException e) {
- validQuery = false;
- break;
- }
- }
-
- /* If the search term starts with a $ followed by 8 to 40 hex
- * characters, it must be a fingerprint. */
- else if ((searchTerm.length() >= 9 && searchTerm.length() <= 41 &&
- searchTerm.startsWith("$") &&
- hexPattern.matcher(searchTerm.substring(1)).matches()) ||
- (searchTerm.length() > 19 && searchTerm.length() <= 40 &&
- !searchTerm.startsWith("$") &&
- hexPattern.matcher(searchTerm).matches())) {
- if (searchFingerprint.length() > 0) {
- validQuery = false;
- break;
- }
- searchFingerprint = searchTerm.substring(
- (searchTerm.startsWith("$") ? 1 : 0));
- validQuery = true;
- }
-
-        /* If the search term contains up to 19 alphanumeric
-         * characters, it must be a nickname. */
- else if (searchTerm.length() <= 19 &&
- alphaNumPattern.matcher(searchTerm).matches()) {
- if (searchNickname.length() > 0) {
- validQuery = false;
- break;
- }
- searchNickname = searchTerm;
- validQuery = true;
- }
-
- /* We didn't recognize this search term. */
- else {
- validQuery = false;
- break;
- }
- }
- }
-
- /* We only accept at most one month or three days, but not both, or
- * people could accidentally keep the database busy. */
- if (searchDays.size() > 3 || searchMonths.size() > 1 ||
- (searchMonths.size() == 1 && searchDays.size() > 0)) {
- validQuery = false;
- }
-
- /* If the query is invalid, stop here. */
- if (!validQuery) {
- request.setAttribute("invalidQuery", "Query is invalid.");
- request.getRequestDispatcher("WEB-INF/relay-search.jsp").
- forward(request, response);
- return;
- }
-
- /* Prepare a string that says what we're searching for. */
- List<String> recognizedSearchTerms = new ArrayList<String>();
- if (searchNickname.length() > 0) {
- recognizedSearchTerms.add("nickname <b>" + searchNickname + "</b>");
- }
- if (searchFingerprint.length() > 0) {
- recognizedSearchTerms.add("fingerprint <b>" + searchFingerprint
- + "</b>");
- }
- if (searchIPAddress.length() > 0) {
- recognizedSearchTerms.add("IP address <b>" + searchIPAddress
- + "</b>");
- }
- List<String> recognizedIntervals = new ArrayList<String>();
- for (String searchTerm : searchMonths) {
- recognizedIntervals.add("in <b>" + searchTerm + "</b>");
- }
- for (String searchTerm : searchDays) {
- recognizedIntervals.add("on <b>" + searchTerm + "</b>");
- }
- StringBuilder searchNoticeBuilder = new StringBuilder();
- searchNoticeBuilder.append("Searching for relays with ");
- if (recognizedSearchTerms.size() == 1) {
- searchNoticeBuilder.append(recognizedSearchTerms.get(0));
- } else if (recognizedSearchTerms.size() == 2) {
- searchNoticeBuilder.append(recognizedSearchTerms.get(0) + " and "
- + recognizedSearchTerms.get(1));
- } else {
- for (int i = 0; i < recognizedSearchTerms.size() - 1; i++) {
- searchNoticeBuilder.append(recognizedSearchTerms.get(i) + ", ");
- }
- searchNoticeBuilder.append("and " + recognizedSearchTerms.get(
- recognizedSearchTerms.size() - 1));
- }
- if (recognizedIntervals.size() == 1) {
- searchNoticeBuilder.append(" running "
- + recognizedIntervals.get(0));
- } else if (recognizedIntervals.size() == 2) {
- searchNoticeBuilder.append(" running " + recognizedIntervals.get(0)
- + " and/or " + recognizedIntervals.get(1));
- } else if (recognizedIntervals.size() > 2) {
- searchNoticeBuilder.append(" running ");
- for (int i = 0; i < recognizedIntervals.size() - 1; i++) {
- searchNoticeBuilder.append(recognizedIntervals.get(i) + ", ");
- }
- searchNoticeBuilder.append("and/or " + recognizedIntervals.get(
- recognizedIntervals.size() - 1));
- }
- searchNoticeBuilder.append(" ...");
- String searchNotice = searchNoticeBuilder.toString();
- request.setAttribute("searchNotice", searchNotice);
-
- /* Prepare the query string. */
- StringBuilder conditionBuilder = new StringBuilder();
- boolean addAnd = false;
- if (searchNickname.length() > 0) {
- conditionBuilder.append((addAnd ? "AND " : "")
- + "LOWER(nickname) LIKE '" + searchNickname.toLowerCase()
- + "%' ");
- addAnd = true;
- }
- if (searchFingerprint.length() > 0) {
- conditionBuilder.append((addAnd ? "AND " : "")
- + "fingerprint LIKE '" + searchFingerprint.toLowerCase()
- + "%' ");
- addAnd = true;
- }
- if (searchIPAddress.length() > 0) {
- conditionBuilder.append((addAnd ? "AND " : "")
- + "address LIKE '" + searchIPAddress + "%' ");
- addAnd = true;
- }
- List<String> timeIntervals = new ArrayList<String>();
- if (searchDayTimestamps.size() > 0 ||
- searchMonthTimestamps.size() > 0) {
- StringBuilder timeIntervalBuilder = new StringBuilder();
- boolean addOr = false;
- timeIntervalBuilder.append("AND (");
- for (long searchTimestamp : searchDayTimestamps) {
- if (searchTimestamp < this.minValidAfterMillis) {
- request.setAttribute("outsideInterval", "Returned search "
- + "results may be incomplete, as our data only dates back "
- + "to " + dateTimeFormat.format(this.minValidAfterMillis)
- + ". Older archives are not available.");
- }
- timeIntervalBuilder.append((addOr ? "OR " : "")
- + "(validafter >= '"
- + dateTimeFormat.format(searchTimestamp) + "' AND "
- + "validafter < '" + dateTimeFormat.format(searchTimestamp
- + 24L * 60L * 60L * 1000L) + "') ");
- addOr = true;
- }
- for (long searchTimestamp : searchMonthTimestamps) {
- if (searchTimestamp < this.minValidAfterMillis) {
- request.setAttribute("outsideInterval", "Returned search "
- + "results may be incomplete, as our data only dates back "
- + "to " + dateTimeFormat.format(this.minValidAfterMillis)
- + ". Older archives are not available.");
- }
- Calendar firstOfNextMonth = Calendar.getInstance(
- TimeZone.getTimeZone("UTC"));
- firstOfNextMonth.setTimeInMillis(searchTimestamp);
- firstOfNextMonth.add(Calendar.MONTH, 1);
- timeIntervalBuilder.append((addOr ? "OR " : "")
- + "(validafter >= '"
- + dateTimeFormat.format(searchTimestamp) + "' AND "
- + "validafter < '" + dateTimeFormat.format(
- firstOfNextMonth.getTimeInMillis()) + "') ");
- addOr = true;
- }
- timeIntervalBuilder.append(") ");
- timeIntervals.add(timeIntervalBuilder.toString());
- } else {
- timeIntervals.add("AND validafter >= '"
- + dateTimeFormat.format(System.currentTimeMillis()
- - 4L * 24L * 60L * 60L * 1000L) + "' ");
- timeIntervals.add("AND validafter >= '"
- + dateTimeFormat.format(System.currentTimeMillis()
- - 30L * 24L * 60L * 60L * 1000L) + "' ");
- }
- List<String> queries = new ArrayList<String>();
- for (String timeInterval : timeIntervals) {
- StringBuilder queryBuilder = new StringBuilder();
- queryBuilder.append("SELECT validafter, fingerprint, descriptor, "
- + "rawdesc FROM statusentry WHERE validafter IN (SELECT "
- + "validafter FROM statusentry WHERE ");
- queryBuilder.append(conditionBuilder.toString());
- queryBuilder.append(timeInterval);
- queryBuilder.append("ORDER BY validafter DESC LIMIT 31) AND ");
- queryBuilder.append(conditionBuilder.toString());
- queryBuilder.append(timeInterval);
- queries.add(queryBuilder.toString());
- }
-
- /* Actually execute the query. */
- long startedQuery = System.currentTimeMillis();
- SortedMap<String, SortedSet<String>> foundDescriptors =
- new TreeMap<String, SortedSet<String>>(
- Collections.reverseOrder());
- Map<String, String> rawValidAfterLines =
- new HashMap<String, String>();
- Map<String, String> rawStatusEntries = new HashMap<String, String>();
- String query = null;
- int matches = 0;
- try {
- long requestedConnection = System.currentTimeMillis();
- Connection conn = this.ds.getConnection();
- while (!queries.isEmpty()) {
- query = queries.remove(0);
- this.logger.info("Running query '" + query + "'.");
- Statement statement = conn.createStatement();
- ResultSet rs = statement.executeQuery(query);
- while (rs.next()) {
- matches++;
- String validAfter = rs.getTimestamp(1).toString().
- substring(0, 19);
- String fingerprint = rs.getString(2);
- if (!foundDescriptors.containsKey(validAfter)) {
- foundDescriptors.put(validAfter, new TreeSet<String>());
- }
- foundDescriptors.get(validAfter).add(validAfter + " "
- + fingerprint);
- if (!rawValidAfterLines.containsKey(validAfter)) {
- rawValidAfterLines.put(validAfter, "<tt>valid-after "
- + "<a href=\"consensus?valid-after="
- + validAfter.replaceAll(":", "-").replaceAll(" ", "-")
- + "\" target=\"_blank\">" + validAfter + "</a></tt><br>");
- }
- byte[] rawStatusEntry = rs.getBytes(4);
- String statusEntryLines = null;
- try {
- statusEntryLines = new String(rawStatusEntry, "US-ASCII");
- } catch (UnsupportedEncodingException e) {
- /* This shouldn't happen, because we know that ASCII is
- * supported. */
- }
- StringBuilder rawStatusEntryBuilder = new StringBuilder();
- String[] lines = statusEntryLines.split("\n");
- for (String line : lines) {
- if (line.startsWith("r ")) {
- String[] parts = line.split(" ");
- String descriptorBase64 = String.format("%040x",
- new BigInteger(1, Base64.decodeBase64(parts[3]
- + "==")));
- rawStatusEntryBuilder.append("<tt>r " + parts[1] + " "
- + parts[2] + " <a href=\"serverdesc?desc-id="
- + descriptorBase64 + "\" target=\"_blank\">" + parts[3]
- + "</a> " + parts[4] + " " + parts[5] + " " + parts[6]
- + " " + parts[7] + " " + parts[8] + "</tt><br>");
- } else {
- rawStatusEntryBuilder.append("<tt>" + line + "</tt><br>");
- }
- }
- rawStatusEntries.put(validAfter + " " + fingerprint,
- rawStatusEntryBuilder.toString());
- }
- rs.close();
- statement.close();
- if (matches >= 31) {
- queries.clear();
- }
- }
- conn.close();
- this.logger.info("Returned a database connection to the pool "
- + "after " + (System.currentTimeMillis()
- - requestedConnection) + " millis.");
- } catch (SQLException e) {
-
- /* Tell the user we have a database problem. */
- response.sendError(HttpServletResponse.SC_INTERNAL_SERVER_ERROR,
- "Database problem");
- return;
- }
- request.setAttribute("query", query);
- request.setAttribute("queryTime", System.currentTimeMillis()
- - startedQuery);
- request.setAttribute("foundDescriptors", foundDescriptors);
- request.setAttribute("rawValidAfterLines", rawValidAfterLines);
- request.setAttribute("rawStatusEntries", rawStatusEntries);
- request.setAttribute("matches", matches);
-
- /* We're done. Let the JSP do the rest. */
- request.getRequestDispatcher("WEB-INF/relay-search.jsp").forward(
- request, response);
- }
-}
-
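The parser above classifies every search term by shape: dotted terms are IP address prefixes, "20"-prefixed hyphenated terms are days or months, "$"-prefixed or long hex strings are fingerprints, and short alphanumeric strings are nicknames. A condensed sketch of that classification (without the per-field duplication checks):

import java.util.regex.Pattern;

public class TermClassifier {
  private static final Pattern HEX = Pattern.compile("[A-Fa-f0-9]+");
  private static final Pattern ALNUM = Pattern.compile("[A-Za-z0-9]+");

  static String classify(String term) {
    if (term.contains(".") && !term.startsWith(".")) {
      return "ip-address";
    } else if (term.startsWith("20") && term.contains("-")) {
      return term.length() == 10 ? "day" : "month";
    } else if ((term.startsWith("$") && term.length() >= 9
        && term.length() <= 41
        && HEX.matcher(term.substring(1)).matches())
        || (term.length() > 19 && term.length() <= 40
        && HEX.matcher(term).matches())) {
      return "fingerprint";
    } else if (term.length() <= 19 && ALNUM.matcher(term).matches()) {
      return "nickname";
    }
    return "invalid";
  }

  public static void main(String[] args) {
    System.out.println(classify("gabelmoo"));   // nickname
    System.out.println(classify("2010-09-18")); // day
    System.out.println(classify("80.190.246")); // ip-address
    System.out.println(classify(
        "$F2044413DAC2E02E3D6BCF4735A19BCA1DE97281")); // fingerprint
  }
}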
diff --git a/src/org/torproject/ernie/web/ResearchDataServlet.java b/src/org/torproject/ernie/web/ResearchDataServlet.java
deleted file mode 100644
index 8132af9..0000000
--- a/src/org/torproject/ernie/web/ResearchDataServlet.java
+++ /dev/null
@@ -1,260 +0,0 @@
-/* Copyright 2011, 2012 The Tor Project
- * See LICENSE for licensing information */
-package org.torproject.ernie.web;
-
-import java.io.File;
-import java.io.IOException;
-import java.text.ParseException;
-import java.text.SimpleDateFormat;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Date;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.SortedMap;
-import java.util.TreeMap;
-
-import javax.servlet.ServletException;
-import javax.servlet.http.HttpServlet;
-import javax.servlet.http.HttpServletRequest;
-import javax.servlet.http.HttpServletResponse;
-
-/**
- * Controller servlet for the Data page. Prepares the various lists of
- * downloadable metrics data files by parsing a file with URLs on other
- * servers and looking at a local directory with files served by the
- * local Apache HTTP server. The file with URLs on other servers may
- * contain comment lines starting with #. Recognizes metrics data file
- * types from the file names.
- */
-public class ResearchDataServlet extends HttpServlet {
-
- private static final long serialVersionUID = -5168280373350515577L;
-
- public void doGet(HttpServletRequest request,
- HttpServletResponse response) throws IOException, ServletException {
-
- /* Read local directory with files served by the local Apache HTTP
- * server and add the URLs to the list. */
- List<String> dataFileUrls = new ArrayList<String>();
- String localDataDir = getServletConfig().getInitParameter(
- "localDataDir");
- if (localDataDir != null) {
- try {
- File localDataDirFile = new File(localDataDir);
- if (localDataDirFile.exists() && localDataDirFile.isDirectory()) {
- for (File localDataFile : localDataDirFile.listFiles()) {
- if (!localDataFile.isDirectory()) {
- dataFileUrls.add("/data/" + localDataFile.getName());
- }
- }
- }
- } catch (SecurityException e) {
- /* We're not permitted to read the directory with metrics data
- * files. Ignore. */
- }
- }
-
- /* Prepare data structures that we're going to pass to the JSP. All
- * data structures are (nested) maps with the map keys being used for
- * displaying the files in tables and map values being 2-element
- * arrays containing the file url and optional signature file. */
- SortedMap<Date, Map<String, String[]>> relayDescriptors =
- new TreeMap<Date, Map<String, String[]>>(
- java.util.Collections.reverseOrder());
- String[] certs = new String[2];
- SortedMap<Date, String[]> bridgeDescriptors =
- new TreeMap<Date, String[]>(java.util.Collections.reverseOrder());
- String[] relayStatistics = new String[2];
- SortedMap<Date, String[]> torperfTarballs =
- new TreeMap<Date, String[]>(java.util.Collections.reverseOrder());
- SortedMap<String, Map<String, String[]>> torperfData =
- new TreeMap<String, Map<String, String[]>>();
- SortedMap<Date, String[]> exitLists =
- new TreeMap<Date, String[]>(java.util.Collections.reverseOrder());
- SortedMap<Date, String[]> torperfExperiments =
- new TreeMap<Date, String[]>();
- SortedMap<Date, String[]> bridgePoolAssignments =
- new TreeMap<Date, String[]>(java.util.Collections.reverseOrder());
-
- /* Prepare rewriting Torperf sources. */
- Map<String, String> torperfSources = new HashMap<String, String>();
- torperfSources.put("torperffast", "torperf, fastest");
- torperfSources.put("torperffastratio", "torperf, best ratio");
- torperfSources.put("torperfslow", "torperf, slowest");
- torperfSources.put("torperfslowratio", "torperf, worst ratio");
-
- /* Go through the file list, decide for each file what metrics data
- * type it is, and put it in the appropriate map. */
- SimpleDateFormat monthFormat = new SimpleDateFormat("yyyy-MM");
- SimpleDateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd");
- List<String> torperfFilesizes = Arrays.asList("50kb,1mb,5mb".
- split(","));
- for (String url : dataFileUrls) {
- if (!url.contains("/")) {
- continue;
- }
- String filename = url.substring(url.lastIndexOf("/") + 1);
-
- /* URL contains relay descriptors. */
- if (filename.startsWith("tor-20") ||
- filename.startsWith("statuses-20") ||
- filename.startsWith("server-descriptors-20") ||
- filename.startsWith("extra-infos-20") ||
- filename.startsWith("votes-20") ||
- filename.startsWith("consensuses-20")) {
- String type = filename.substring(0, filename.indexOf("-20"));
- String yearMonth = filename.substring(filename.indexOf("20"));
- yearMonth = yearMonth.substring(0, 7);
- Date month = null;
- try {
- month = monthFormat.parse(yearMonth);
- } catch (ParseException e) {
- /* Ignore this URL. */
- continue;
- }
- int index = filename.endsWith(".asc") ? 1 : 0;
- if (!relayDescriptors.containsKey(month)) {
- relayDescriptors.put(month, new HashMap<String, String[]>());
- }
- if (!relayDescriptors.get(month).containsKey(type)) {
- relayDescriptors.get(month).put(type, new String[2]);
- }
- relayDescriptors.get(month).get(type)[index] = url;
-
- /* URL contains v3 certificates. */
- } else if (filename.startsWith("certs.tar")) {
- int index = filename.endsWith(".asc") ? 1 : 0;
- certs[index] = url;
-
- /* URL contains bridge descriptors. */
- } else if (filename.startsWith("bridge-descriptors-20")) {
- String yearMonth = filename.substring(filename.indexOf("20"));
- yearMonth = yearMonth.substring(0, 7);
- Date month = null;
- try {
- month = monthFormat.parse(yearMonth);
- } catch (ParseException e) {
- /* Ignore this URL. */
- continue;
- }
- int index = filename.endsWith(".asc") ? 1 : 0;
- if (!bridgeDescriptors.containsKey(month)) {
- bridgeDescriptors.put(month, new String[2]);
- }
- bridgeDescriptors.get(month)[index] = url;
-
- /* URL contains relay statistics. */
- } else if (filename.startsWith("relay-statistics.tar.bz2")) {
- int index = filename.endsWith(".asc") ? 1 : 0;
- relayStatistics[index] = url;
-
- /* URL contains Torperf tarball. */
- } else if (filename.startsWith("torperf-20")) {
- String yearMonth = filename.substring(filename.indexOf("20"));
- yearMonth = yearMonth.substring(0, 7);
- Date month = null;
- try {
- month = monthFormat.parse(yearMonth);
- } catch (ParseException e) {
- /* Ignore this URL. */
- continue;
- }
- if (!torperfTarballs.containsKey(month)) {
- torperfTarballs.put(month, new String[2]);
- }
- torperfTarballs.get(month)[0] = url;
-
- /* URL contains Torperf data file. */
- } else if (filename.endsWith("b.data") ||
- filename.endsWith("b.extradata")) {
- boolean isExtraData = filename.endsWith("b.extradata");
- String[] parts = filename.split("-");
- if (parts.length != 2) {
- continue;
- }
- String source = parts[0];
- if (torperfSources.containsKey(source)) {
- source = torperfSources.get(source);
- }
- String filesize = parts[1];
- filesize = filesize.substring(0, filesize.length()
- - (isExtraData ? 10 : 5));
- if (!torperfFilesizes.contains(filesize)) {
- continue;
- }
- if (!torperfData.containsKey(source)) {
- torperfData.put(source, new HashMap<String, String[]>());
- }
- if (!torperfData.get(source).containsKey(filesize)) {
- torperfData.get(source).put(filesize, new String[2]);
- }
- torperfData.get(source).get(filesize)[isExtraData ? 1 : 0] = url;
-
- /* URL contains Torperf experiment tarball. */
- } else if (filename.startsWith("torperf-experiment-20")) {
- String dateString = filename.substring(filename.indexOf("20"));
- dateString = dateString.substring(0, 10);
- Date date = null;
- try {
- date = dateFormat.parse(dateString);
- } catch (ParseException e) {
- /* Ignore this URL. */
- continue;
- }
- if (!torperfExperiments.containsKey(date)) {
- torperfExperiments.put(date, new String[2]);
- }
- torperfExperiments.get(date)[0] = url;
-
- /* URL contains exit list. */
- } else if (filename.startsWith("exit-list-20")) {
- String yearMonth = filename.substring(filename.indexOf("20"));
- yearMonth = yearMonth.substring(0, 7);
- Date month = null;
- try {
- month = monthFormat.parse(yearMonth);
- } catch (ParseException e) {
- /* Ignore this URL. */
- continue;
- }
- if (!exitLists.containsKey(month)) {
- exitLists.put(month, new String[2]);
- }
- exitLists.get(month)[0] = url;
-
- /* URL contains bridge pool assignments. */
- } else if (filename.startsWith("bridge-pool-assignments-20")) {
- String yearMonth = filename.substring(filename.indexOf("20"));
- yearMonth = yearMonth.substring(0, 7);
- Date month = null;
- try {
- month = monthFormat.parse(yearMonth);
- } catch (ParseException e) {
- /* Ignore this URL. */
- continue;
- }
- if (!bridgePoolAssignments.containsKey(month)) {
- bridgePoolAssignments.put(month, new String[2]);
- }
- bridgePoolAssignments.get(month)[0] = url;
- }
- }
-
- /* Add the maps to the request and forward it to the JSP to display
- * the page. */
- request.setAttribute("relayDescriptors", relayDescriptors);
- request.setAttribute("certs", certs);
- request.setAttribute("bridgeDescriptors", bridgeDescriptors);
- request.setAttribute("relayStatistics", relayStatistics);
- request.setAttribute("torperfData", torperfData);
- request.setAttribute("exitLists", exitLists);
- request.setAttribute("torperfTarballs", torperfTarballs);
- request.setAttribute("torperfExperiments", torperfExperiments);
- request.setAttribute("bridgePoolAssignments", bridgePoolAssignments);
- request.getRequestDispatcher("WEB-INF/data.jsp").forward(request,
- response);
- }
-}
-
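Most branches above repeat the same step: cut a "yyyy-MM" month out of the filename, parse it, and use the ".asc" suffix to decide between the data slot and the signature slot. A small sketch of that recurring step, assuming a filename like "consensuses-2012-11.tar.bz2":

import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.Date;

public class FilenameMonth {
  public static void main(String[] args) throws ParseException {
    String filename = "consensuses-2012-11.tar.bz2";
    SimpleDateFormat monthFormat = new SimpleDateFormat("yyyy-MM");
    /* The first occurrence of "20" marks the start of the year. */
    String yearMonth = filename.substring(filename.indexOf("20"));
    yearMonth = yearMonth.substring(0, 7);
    Date month = monthFormat.parse(yearMonth);
    int index = filename.endsWith(".asc") ? 1 : 0;
    System.out.println(month + ", slot " + index);
  }
}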
diff --git a/src/org/torproject/ernie/web/ResearchFormatsServlet.java b/src/org/torproject/ernie/web/ResearchFormatsServlet.java
deleted file mode 100644
index 96bca4f..0000000
--- a/src/org/torproject/ernie/web/ResearchFormatsServlet.java
+++ /dev/null
@@ -1,24 +0,0 @@
-/* Copyright 2011, 2012 The Tor Project
- * See LICENSE for licensing information */
-package org.torproject.ernie.web;
-
-import java.io.IOException;
-
-import javax.servlet.ServletException;
-import javax.servlet.http.HttpServlet;
-import javax.servlet.http.HttpServletRequest;
-import javax.servlet.http.HttpServletResponse;
-
-public class ResearchFormatsServlet extends HttpServlet {
-
- private static final long serialVersionUID = 5666493868675314116L;
-
- public void doGet(HttpServletRequest request,
- HttpServletResponse response) throws IOException, ServletException {
-
- /* Forward the request to the JSP that does all the hard work. */
- request.getRequestDispatcher("WEB-INF/formats.jsp").forward(request,
- response);
- }
-}
-
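ResearchFormatsServlet and the two servlets that follow differ only in the JSP they forward to. For illustration, a hypothetical way to collapse them into one servlet configured per mapping; the "jsp" init-param is an assumption, not part of this commit:

import java.io.IOException;

import javax.servlet.ServletException;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;

public class JspForwardServlet extends HttpServlet {

  private String jsp;

  public void init() {
    /* Hypothetical init-param, e.g. "WEB-INF/formats.jsp". */
    this.jsp = getServletConfig().getInitParameter("jsp");
  }

  public void doGet(HttpServletRequest request,
      HttpServletResponse response) throws IOException, ServletException {
    request.getRequestDispatcher(this.jsp).forward(request, response);
  }
}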
diff --git a/src/org/torproject/ernie/web/ResearchPapersServlet.java b/src/org/torproject/ernie/web/ResearchPapersServlet.java
deleted file mode 100644
index ed3414c..0000000
--- a/src/org/torproject/ernie/web/ResearchPapersServlet.java
+++ /dev/null
@@ -1,24 +0,0 @@
-/* Copyright 2011, 2012 The Tor Project
- * See LICENSE for licensing information */
-package org.torproject.ernie.web;
-
-import java.io.IOException;
-
-import javax.servlet.ServletException;
-import javax.servlet.http.HttpServlet;
-import javax.servlet.http.HttpServletRequest;
-import javax.servlet.http.HttpServletResponse;
-
-public class ResearchPapersServlet extends HttpServlet {
-
- private static final long serialVersionUID = -8135459207158536268L;
-
- public void doGet(HttpServletRequest request,
- HttpServletResponse response) throws IOException, ServletException {
-
- /* Forward the request to the JSP that does all the hard work. */
- request.getRequestDispatcher("WEB-INF/papers.jsp").forward(request,
- response);
- }
-}
-
diff --git a/src/org/torproject/ernie/web/ResearchToolsServlet.java b/src/org/torproject/ernie/web/ResearchToolsServlet.java
deleted file mode 100644
index 2345df4..0000000
--- a/src/org/torproject/ernie/web/ResearchToolsServlet.java
+++ /dev/null
@@ -1,24 +0,0 @@
-/* Copyright 2011, 2012 The Tor Project
- * See LICENSE for licensing information */
-package org.torproject.ernie.web;
-
-import java.io.IOException;
-
-import javax.servlet.ServletException;
-import javax.servlet.http.HttpServlet;
-import javax.servlet.http.HttpServletRequest;
-import javax.servlet.http.HttpServletResponse;
-
-public class ResearchToolsServlet extends HttpServlet {
-
- private static final long serialVersionUID = -3344204426180358872L;
-
- public void doGet(HttpServletRequest request,
- HttpServletResponse response) throws IOException, ServletException {
-
- /* Forward the request to the JSP that does all the hard work. */
- request.getRequestDispatcher("WEB-INF/tools.jsp").forward(request,
- response);
- }
-}
-
diff --git a/src/org/torproject/ernie/web/ServerDescriptorServlet.java b/src/org/torproject/ernie/web/ServerDescriptorServlet.java
deleted file mode 100644
index 5f8ba5b..0000000
--- a/src/org/torproject/ernie/web/ServerDescriptorServlet.java
+++ /dev/null
@@ -1,132 +0,0 @@
-/* Copyright 2011, 2012 The Tor Project
- * See LICENSE for licensing information */
-package org.torproject.ernie.web;
-
-import java.io.BufferedOutputStream;
-import java.io.IOException;
-import java.sql.Connection;
-import java.sql.ResultSet;
-import java.sql.SQLException;
-import java.sql.Statement;
-import java.util.ArrayList;
-import java.util.List;
-import java.util.logging.Level;
-import java.util.logging.Logger;
-import java.util.regex.Matcher;
-import java.util.regex.Pattern;
-
-import javax.naming.Context;
-import javax.naming.InitialContext;
-import javax.naming.NamingException;
-import javax.servlet.ServletException;
-import javax.servlet.http.HttpServlet;
-import javax.servlet.http.HttpServletRequest;
-import javax.servlet.http.HttpServletResponse;
-import javax.sql.DataSource;
-
-public class ServerDescriptorServlet extends HttpServlet {
-
- private static final long serialVersionUID = -7935883442750583462L;
-
- private DataSource ds;
-
- private Logger logger;
-
- public void init() {
-
- /* Initialize logger. */
- this.logger = Logger.getLogger(
- ServerDescriptorServlet.class.toString());
-
- /* Look up data source. */
- try {
- Context cxt = new InitialContext();
- this.ds = (DataSource) cxt.lookup("java:comp/env/jdbc/exonerator");
- this.logger.info("Successfully looked up data source.");
- } catch (NamingException e) {
- this.logger.log(Level.WARNING, "Could not look up data source", e);
- }
- }
-
- public void doGet(HttpServletRequest request,
- HttpServletResponse response) throws IOException,
- ServletException {
-
- /* Read desc-id parameter. */
- String descIdParameter = request.getParameter("desc-id");
-
- /* See if we were given a desc-id parameter. If so, look up this
- * descriptor and return it. */
- List<byte[]> rawDescriptors = new ArrayList<byte[]>();
- String filename = null;
- if (descIdParameter != null) {
- if (descIdParameter.length() < 8 ||
- descIdParameter.length() > 40) {
- response.sendError(HttpServletResponse.SC_BAD_REQUEST);
- return;
- }
- String descId = descIdParameter.toLowerCase();
- Pattern descIdPattern = Pattern.compile("^[0-9a-f]+$");
- Matcher descIdMatcher = descIdPattern.matcher(descId);
- if (!descIdMatcher.matches()) {
- response.sendError(HttpServletResponse.SC_BAD_REQUEST);
- return;
- }
-
- /* Look up descriptor in the database. */
- try {
- long requestedConnection = System.currentTimeMillis();
- Connection conn = ds.getConnection();
- Statement statement = conn.createStatement();
- String query = "SELECT descriptor, rawdescriptor FROM descriptor "
- + "WHERE descriptor LIKE '" + descId + "%'";
- ResultSet rs = statement.executeQuery(query);
- if (rs.next()) {
- filename = rs.getString(1);
- rawDescriptors.add(rs.getBytes(2));
- }
- rs.close();
- statement.close();
- conn.close();
- this.logger.info("Returned a database connection to the pool "
- + "after " + (System.currentTimeMillis()
- - requestedConnection) + " millis.");
- } catch (SQLException e) {
- response.sendError(HttpServletResponse.SC_INTERNAL_SERVER_ERROR);
- return;
- }
-
- /* Return an error if no desc-id parameter was given. */
- } else {
- response.sendError(HttpServletResponse.SC_BAD_REQUEST);
- return;
- }
-
- /* Write response. */
- if (rawDescriptors.size() == 0) {
- response.sendError(HttpServletResponse.SC_NOT_FOUND);
- return;
- }
- try {
- response.setContentType("text/plain");
- int responseLength = 0;
- for (byte[] rawDescriptor : rawDescriptors) {
- responseLength += rawDescriptor.length;
- }
- response.setHeader("Content-Length", String.valueOf(
- responseLength));
- response.setHeader("Content-Disposition", "inline; filename=\""
- + filename + "\"");
- BufferedOutputStream output = new BufferedOutputStream(
- response.getOutputStream());
- for (byte[] rawDescriptor : rawDescriptors) {
- output.write(rawDescriptor);
- }
- output.flush();
- output.close();
- } finally {
- /* Nothing to do here. */
- }
- }
-}
-
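The lookup above first validates desc-id against ^[0-9a-f]+$ and then concatenates it into the SQL string. An equivalent sketch with a bound parameter instead of concatenation (hypothetical helper, same query):

import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;

public class DescriptorLookup {

  /* Look up a raw descriptor by hex prefix; returns null if unknown. */
  static byte[] lookup(Connection conn, String descId)
      throws SQLException {
    PreparedStatement statement = conn.prepareStatement(
        "SELECT descriptor, rawdescriptor FROM descriptor "
        + "WHERE descriptor LIKE ?");
    try {
      statement.setString(1, descId + "%");
      ResultSet rs = statement.executeQuery();
      return rs.next() ? rs.getBytes(2) : null;
    } finally {
      /* Closing the statement also closes its result set. */
      statement.close();
    }
  }
}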
diff --git a/src/org/torproject/ernie/web/TableParameterChecker.java b/src/org/torproject/ernie/web/TableParameterChecker.java
deleted file mode 100644
index e4b3e13..0000000
--- a/src/org/torproject/ernie/web/TableParameterChecker.java
+++ /dev/null
@@ -1,120 +0,0 @@
-/* Copyright 2011, 2012 The Tor Project
- * See LICENSE for licensing information */
-package org.torproject.ernie.web;
-
-import java.text.ParseException;
-import java.text.SimpleDateFormat;
-import java.util.Arrays;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.Map;
-import java.util.Set;
-import java.util.TimeZone;
-
-/**
- * Checks request parameters passed to generate tables.
- */
-public class TableParameterChecker {
-
- /**
- * Singleton instance of this class.
- */
- private static TableParameterChecker instance =
- new TableParameterChecker();
-
- /**
- * Returns the singleton instance of this class.
- */
- public static TableParameterChecker getInstance() {
- return instance;
- }
-
- /* Date format for parsing start and end dates. */
- private SimpleDateFormat dateFormat;
-
- /* Available tables with corresponding parameter lists. */
- private Map<String, String> availableTables;
-
- /**
-   * Initializes the date format used for parsing start and end dates.
- */
- public TableParameterChecker() {
- this.dateFormat = new SimpleDateFormat("yyyy-MM-dd");
- this.dateFormat.setTimeZone(TimeZone.getTimeZone("UTC"));
- }
-
- public void setAvailableTables(Map<String, String> availableTables) {
- this.availableTables = availableTables;
- }
-
- /**
- * Checks request parameters for the given table type and returns a map
- * of recognized parameters, or null if the table type doesn't exist or
- * the parameters are invalid.
- */
- public Map<String, String[]> checkParameters(String tableType,
- Map requestParameters) {
-
- /* Check if the table type exists. */
- if (tableType == null ||
- !this.availableTables.containsKey(tableType)) {
- return null;
- }
-
- /* Find out which other parameters are supported by this table type
- * and parse them if they are given. */
- Set<String> supportedTableParameters = new HashSet<String>(Arrays.
- asList(this.availableTables.get(tableType).split(",")));
- Map<String, String[]> recognizedTableParameters =
- new HashMap<String, String[]>();
-
- /* Parse start and end dates if supported by the table type. If no end
- * date is provided, set it to today. If no start date is provided,
- * set it to 90 days before the end date. Make sure that start date
- * precedes end date. */
- if (supportedTableParameters.contains("start") ||
- supportedTableParameters.contains("end")) {
- String[] startParameter = null;
- String[] endParameter = null;
- if (requestParameters != null) {
- startParameter = (String[]) requestParameters.get("start");
- endParameter = (String[]) requestParameters.get("end");
- }
- long endTimestamp = System.currentTimeMillis();
- if (endParameter != null && endParameter.length > 0 &&
- endParameter[0].length() > 0) {
- try {
- endTimestamp = dateFormat.parse(endParameter[0]).getTime();
- } catch (ParseException e) {
- return null;
- }
- if (!endParameter[0].startsWith("20")) {
- return null;
- }
- }
- endParameter = new String[] { dateFormat.format(endTimestamp) };
- long startTimestamp = endTimestamp - 90L * 24L * 60L * 60L * 1000L;
- if (startParameter != null && startParameter.length > 0 &&
- startParameter[0].length() > 0) {
- try {
- startTimestamp = dateFormat.parse(startParameter[0]).getTime();
- } catch (ParseException e) {
- return null;
- }
- if (!startParameter[0].startsWith("20")) {
- return null;
- }
- }
- startParameter = new String[] { dateFormat.format(startTimestamp) };
- if (startTimestamp > endTimestamp) {
- return null;
- }
- recognizedTableParameters.put("start", startParameter);
- recognizedTableParameters.put("end", endParameter);
- }
-
- /* We now have a map with all required table parameters. Return it. */
- return recognizedTableParameters;
- }
-}
-
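An illustrative call sequence for the checker above, assuming it runs in the same package and uses the same table map that RObjectGenerator registers:

import java.util.HashMap;
import java.util.Map;

public class TableParameterCheckerDemo {
  public static void main(String[] args) {
    Map<String, String> availableTables = new HashMap<String, String>();
    availableTables.put("direct-users", "start,end,filename");
    TableParameterChecker checker = TableParameterChecker.getInstance();
    checker.setAvailableTables(availableTables);
    Map<String, String[]> params = new HashMap<String, String[]>();
    params.put("start", new String[] { "2012-09-01" });
    params.put("end", new String[] { "2012-12-01" });
    /* Returns normalized dates, or null for an unknown table type,
     * malformed dates, or a start date after the end date. */
    Map<String, String[]> checked =
        checker.checkParameters("direct-users", params);
    System.out.println(checked.get("start")[0] + " .. "
        + checked.get("end")[0]);
  }
}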
diff --git a/src/org/torproject/ernie/web/graphs/Countries.java b/src/org/torproject/ernie/web/graphs/Countries.java
new file mode 100644
index 0000000..c702684
--- /dev/null
+++ b/src/org/torproject/ernie/web/graphs/Countries.java
@@ -0,0 +1,284 @@
+/* Copyright 2011, 2012 The Tor Project
+ * See LICENSE for licensing information */
+package org.torproject.ernie.web.graphs;
+
+import java.util.ArrayList;
+import java.util.List;
+
+public class Countries {
+
+ private static Countries instance = new Countries();
+
+ public static Countries getInstance() {
+ return Countries.instance;
+ }
+
+ /* List of arrays of length 2, containing country codes at [0] and
+ * country names at [1], alphabetically ordered by country names. */
+ private List<String[]> knownCountries;
+
+ private Countries() {
+ this.knownCountries = new ArrayList<String[]>();
+ this.knownCountries.add("af;Afghanistan".split(";"));
+ this.knownCountries.add("ax;Aland Islands".split(";"));
+ this.knownCountries.add("al;Albania".split(";"));
+ this.knownCountries.add("dz;Algeria".split(";"));
+ this.knownCountries.add("as;American Samoa".split(";"));
+ this.knownCountries.add("ad;Andorra".split(";"));
+ this.knownCountries.add("ao;Angola".split(";"));
+ this.knownCountries.add("ai;Anguilla".split(";"));
+ this.knownCountries.add("aq;Antarctica".split(";"));
+ this.knownCountries.add("ag;Antigua and Barbuda".split(";"));
+ this.knownCountries.add("ar;Argentina".split(";"));
+ this.knownCountries.add("am;Armenia".split(";"));
+ this.knownCountries.add("aw;Aruba".split(";"));
+ this.knownCountries.add("au;Australia".split(";"));
+ this.knownCountries.add("at;Austria".split(";"));
+ this.knownCountries.add("az;Azerbaijan".split(";"));
+ this.knownCountries.add("bs;Bahamas".split(";"));
+ this.knownCountries.add("bh;Bahrain".split(";"));
+ this.knownCountries.add("bd;Bangladesh".split(";"));
+ this.knownCountries.add("bb;Barbados".split(";"));
+ this.knownCountries.add("by;Belarus".split(";"));
+ this.knownCountries.add("be;Belgium".split(";"));
+ this.knownCountries.add("bz;Belize".split(";"));
+ this.knownCountries.add("bj;Benin".split(";"));
+ this.knownCountries.add("bm;Bermuda".split(";"));
+ this.knownCountries.add("bt;Bhutan".split(";"));
+ this.knownCountries.add("bo;Bolivia".split(";"));
+ this.knownCountries.add("ba;Bosnia and Herzegovina".split(";"));
+ this.knownCountries.add("bw;Botswana".split(";"));
+ this.knownCountries.add("bv;Bouvet Island".split(";"));
+ this.knownCountries.add("br;Brazil".split(";"));
+ this.knownCountries.add("io;British Indian Ocean Territory".
+ split(";"));
+ this.knownCountries.add("bn;Brunei".split(";"));
+ this.knownCountries.add("bg;Bulgaria".split(";"));
+ this.knownCountries.add("bf;Burkina Faso".split(";"));
+ this.knownCountries.add("mm;Burma".split(";"));
+ this.knownCountries.add("bi;Burundi".split(";"));
+ this.knownCountries.add("kh;Cambodia".split(";"));
+ this.knownCountries.add("cm;Cameroon".split(";"));
+ this.knownCountries.add("ca;Canada".split(";"));
+ this.knownCountries.add("cv;Cape Verde".split(";"));
+ this.knownCountries.add("ky;Cayman Islands".split(";"));
+ this.knownCountries.add("cf;Central African Republic".split(";"));
+ this.knownCountries.add("td;Chad".split(";"));
+ this.knownCountries.add("cl;Chile".split(";"));
+ this.knownCountries.add("cn;China".split(";"));
+ this.knownCountries.add("cx;Christmas Island".split(";"));
+ this.knownCountries.add("cc;Cocos (Keeling) Islands".split(";"));
+ this.knownCountries.add("co;Colombia".split(";"));
+ this.knownCountries.add("km;Comoros".split(";"));
+ this.knownCountries.add("cd;Congo, The Democratic Republic of the".
+ split(";"));
+ this.knownCountries.add("cg;Congo".split(";"));
+ this.knownCountries.add("ck;Cook Islands".split(";"));
+ this.knownCountries.add("cr;Costa Rica".split(";"));
+ this.knownCountries.add("ci:Côte d'Ivoire".split(":"));
+ this.knownCountries.add("hr;Croatia".split(";"));
+ this.knownCountries.add("cu;Cuba".split(";"));
+ this.knownCountries.add("cy;Cyprus".split(";"));
+ this.knownCountries.add("cz;Czech Republic".split(";"));
+ this.knownCountries.add("dk;Denmark".split(";"));
+ this.knownCountries.add("dj;Djibouti".split(";"));
+ this.knownCountries.add("dm;Dominica".split(";"));
+ this.knownCountries.add("do;Dominican Republic".split(";"));
+ this.knownCountries.add("ec;Ecuador".split(";"));
+ this.knownCountries.add("eg;Egypt".split(";"));
+ this.knownCountries.add("sv;El Salvador".split(";"));
+ this.knownCountries.add("gq;Equatorial Guinea".split(";"));
+ this.knownCountries.add("er;Eritrea".split(";"));
+ this.knownCountries.add("ee;Estonia".split(";"));
+ this.knownCountries.add("et;Ethiopia".split(";"));
+ this.knownCountries.add("fk;Falkland Islands (Malvinas)".split(";"));
+ this.knownCountries.add("fo;Faroe Islands".split(";"));
+ this.knownCountries.add("fj;Fiji".split(";"));
+ this.knownCountries.add("fi;Finland".split(";"));
+ this.knownCountries.add("fx;France, Metropolitan".split(";"));
+ this.knownCountries.add("fr;France".split(";"));
+ this.knownCountries.add("gf;French Guiana".split(";"));
+ this.knownCountries.add("pf;French Polynesia".split(";"));
+ this.knownCountries.add("tf;French Southern Territories".split(";"));
+ this.knownCountries.add("ga;Gabon".split(";"));
+ this.knownCountries.add("gm;Gambia".split(";"));
+ this.knownCountries.add("ge;Georgia".split(";"));
+ this.knownCountries.add("de;Germany".split(";"));
+ this.knownCountries.add("gh;Ghana".split(";"));
+ this.knownCountries.add("gi;Gibraltar".split(";"));
+ this.knownCountries.add("gr;Greece".split(";"));
+ this.knownCountries.add("gl;Greenland".split(";"));
+ this.knownCountries.add("gd;Grenada".split(";"));
+ this.knownCountries.add("gp;Guadeloupe".split(";"));
+ this.knownCountries.add("gu;Guam".split(";"));
+ this.knownCountries.add("gt;Guatemala".split(";"));
+ this.knownCountries.add("gg;Guernsey".split(";"));
+ this.knownCountries.add("gn;Guinea".split(";"));
+ this.knownCountries.add("gw;Guinea-Bissau".split(";"));
+ this.knownCountries.add("gy;Guyana".split(";"));
+ this.knownCountries.add("ht;Haiti".split(";"));
+ this.knownCountries.add("hm;Heard Island and McDonald Islands".
+ split(";"));
+ this.knownCountries.add("va;Vatican City".split(";"));
+ this.knownCountries.add("hn;Honduras".split(";"));
+ this.knownCountries.add("hk;Hong Kong".split(";"));
+ this.knownCountries.add("hu;Hungary".split(";"));
+ this.knownCountries.add("is;Iceland".split(";"));
+ this.knownCountries.add("in;India".split(";"));
+ this.knownCountries.add("id;Indonesia".split(";"));
+ this.knownCountries.add("ir;Iran".split(";"));
+ this.knownCountries.add("iq;Iraq".split(";"));
+ this.knownCountries.add("ie;Ireland".split(";"));
+ this.knownCountries.add("im;Isle of Man".split(";"));
+ this.knownCountries.add("il;Israel".split(";"));
+ this.knownCountries.add("it;Italy".split(";"));
+ this.knownCountries.add("jm;Jamaica".split(";"));
+ this.knownCountries.add("jp;Japan".split(";"));
+ this.knownCountries.add("je;Jersey".split(";"));
+ this.knownCountries.add("jo;Jordan".split(";"));
+ this.knownCountries.add("kz;Kazakhstan".split(";"));
+ this.knownCountries.add("ke;Kenya".split(";"));
+ this.knownCountries.add("ki;Kiribati".split(";"));
+ this.knownCountries.add("kp;North Korea".split(";"));
+ this.knownCountries.add("kr;Korea, Republic of".split(";"));
+ this.knownCountries.add("kw;Kuwait".split(";"));
+ this.knownCountries.add("kg;Kyrgyzstan".split(";"));
+ this.knownCountries.add("la;Laos".split(";"));
+ this.knownCountries.add("lv;Latvia".split(";"));
+ this.knownCountries.add("lb;Lebanon".split(";"));
+ this.knownCountries.add("ls;Lesotho".split(";"));
+ this.knownCountries.add("lr;Liberia".split(";"));
+ this.knownCountries.add("ly;Libya".split(";"));
+ this.knownCountries.add("li;Liechtenstein".split(";"));
+ this.knownCountries.add("lt;Lithuania".split(";"));
+ this.knownCountries.add("lu;Luxembourg".split(";"));
+ this.knownCountries.add("mo;Macau".split(";"));
+ this.knownCountries.add("mk;Macedonia".split(";"));
+ this.knownCountries.add("mg;Madagascar".split(";"));
+ this.knownCountries.add("mw;Malawi".split(";"));
+ this.knownCountries.add("my;Malaysia".split(";"));
+ this.knownCountries.add("mv;Maldives".split(";"));
+ this.knownCountries.add("ml;Mali".split(";"));
+ this.knownCountries.add("mt;Malta".split(";"));
+ this.knownCountries.add("mh;Marshall Islands".split(";"));
+ this.knownCountries.add("mq;Martinique".split(";"));
+ this.knownCountries.add("mr;Mauritania".split(";"));
+ this.knownCountries.add("mu;Mauritius".split(";"));
+ this.knownCountries.add("yt;Mayotte".split(";"));
+ this.knownCountries.add("mx;Mexico".split(";"));
+ this.knownCountries.add("fm;Micronesia, Federated States of".
+ split(";"));
+ this.knownCountries.add("md;Moldova, Republic of".split(";"));
+ this.knownCountries.add("mc;Monaco".split(";"));
+ this.knownCountries.add("mn;Mongolia".split(";"));
+ this.knownCountries.add("me;Montenegro".split(";"));
+ this.knownCountries.add("ms;Montserrat".split(";"));
+ this.knownCountries.add("ma;Morocco".split(";"));
+ this.knownCountries.add("mz;Mozambique".split(";"));
+ this.knownCountries.add("mm;Burma".split(";"));
+ this.knownCountries.add("na;Namibia".split(";"));
+ this.knownCountries.add("nr;Nauru".split(";"));
+ this.knownCountries.add("np;Nepal".split(";"));
+ this.knownCountries.add("an;Netherlands Antilles".split(";"));
+ this.knownCountries.add("nl;Netherlands".split(";"));
+ this.knownCountries.add("nc;New Caledonia".split(";"));
+ this.knownCountries.add("nz;New Zealand".split(";"));
+ this.knownCountries.add("ni;Nicaragua".split(";"));
+ this.knownCountries.add("ne;Niger".split(";"));
+ this.knownCountries.add("ng;Nigeria".split(";"));
+ this.knownCountries.add("nu;Niue".split(";"));
+ this.knownCountries.add("nf;Norfolk Island".split(";"));
+ this.knownCountries.add("mp;Northern Mariana Islands".split(";"));
+ this.knownCountries.add("no;Norway".split(";"));
+ this.knownCountries.add("om;Oman".split(";"));
+ this.knownCountries.add("pk;Pakistan".split(";"));
+ this.knownCountries.add("pw;Palau".split(";"));
+ this.knownCountries.add("ps;Palestinian Territory".split(";"));
+ this.knownCountries.add("pa;Panama".split(";"));
+ this.knownCountries.add("pg;Papua New Guinea".split(";"));
+ this.knownCountries.add("py;Paraguay".split(";"));
+ this.knownCountries.add("pe;Peru".split(";"));
+ this.knownCountries.add("ph;Philippines".split(";"));
+ this.knownCountries.add("pn;Pitcairn Islands".split(";"));
+ this.knownCountries.add("pl;Poland".split(";"));
+ this.knownCountries.add("pt;Portugal".split(";"));
+ this.knownCountries.add("pr;Puerto Rico".split(";"));
+ this.knownCountries.add("qa;Qatar".split(";"));
+ this.knownCountries.add("re;Reunion".split(";"));
+ this.knownCountries.add("ro;Romania".split(";"));
+ this.knownCountries.add("ru;Russia".split(";"));
+ this.knownCountries.add("rw;Rwanda".split(";"));
+ this.knownCountries.add("bl;Saint Bartelemey".split(";"));
+ this.knownCountries.add("sh;Saint Helena".split(";"));
+ this.knownCountries.add("kn;Saint Kitts and Nevis".split(";"));
+ this.knownCountries.add("lc;Saint Lucia".split(";"));
+ this.knownCountries.add("mf;Saint Martin".split(";"));
+ this.knownCountries.add("pm;Saint Pierre and Miquelon".split(";"));
+ this.knownCountries.add("vc;Saint Vincent and the Grenadines".
+ split(";"));
+ this.knownCountries.add("ws;Samoa".split(";"));
+ this.knownCountries.add("sm;San Marino".split(";"));
+ this.knownCountries.add("st:São Tomé and Príncipe".
+ split(":"));
+ this.knownCountries.add("sa;Saudi Arabia".split(";"));
+ this.knownCountries.add("sn;Senegal".split(";"));
+ this.knownCountries.add("rs;Serbia".split(";"));
+ this.knownCountries.add("sc;Seychelles".split(";"));
+ this.knownCountries.add("sl;Sierra Leone".split(";"));
+ this.knownCountries.add("sg;Singapore".split(";"));
+ this.knownCountries.add("sk;Slovakia".split(";"));
+ this.knownCountries.add("si;Slovenia".split(";"));
+ this.knownCountries.add("sb;Solomon Islands".split(";"));
+ this.knownCountries.add("so;Somalia".split(";"));
+ this.knownCountries.add("za;South Africa".split(";"));
+ this.knownCountries.add(("gs;South Georgia and the South Sandwich "
+ + "Islands").split(";"));
+ this.knownCountries.add("es;Spain".split(";"));
+ this.knownCountries.add("lk;Sri Lanka".split(";"));
+ this.knownCountries.add("sd;Sudan".split(";"));
+ this.knownCountries.add("sr;Suriname".split(";"));
+ this.knownCountries.add("sj;Svalbard and Jan Mayen".split(";"));
+ this.knownCountries.add("sz;Swaziland".split(";"));
+ this.knownCountries.add("se;Sweden".split(";"));
+ this.knownCountries.add("ch;Switzerland".split(";"));
+ this.knownCountries.add("sy;Syrian Arab Republic".split(";"));
+ this.knownCountries.add("tw;Taiwan".split(";"));
+ this.knownCountries.add("tj;Tajikistan".split(";"));
+ this.knownCountries.add("tz;Tanzania, United Republic of".split(";"));
+ this.knownCountries.add("th;Thailand".split(";"));
+ this.knownCountries.add("tl;East Timor".split(";"));
+ this.knownCountries.add("tg;Togo".split(";"));
+ this.knownCountries.add("tk;Tokelau".split(";"));
+ this.knownCountries.add("to;Tonga".split(";"));
+ this.knownCountries.add("tt;Trinidad and Tobago".split(";"));
+ this.knownCountries.add("tn;Tunisia".split(";"));
+ this.knownCountries.add("tr;Turkey".split(";"));
+ this.knownCountries.add("tm;Turkmenistan".split(";"));
+ this.knownCountries.add("tc;Turks and Caicos Islands".split(";"));
+ this.knownCountries.add("tv;Tuvalu".split(";"));
+ this.knownCountries.add("ug;Uganda".split(";"));
+ this.knownCountries.add("ua;Ukraine".split(";"));
+ this.knownCountries.add("ae;United Arab Emirates".split(";"));
+ this.knownCountries.add("gb;United Kingdom".split(";"));
+ this.knownCountries.add("um;United States Minor Outlying Islands".
+ split(";"));
+ this.knownCountries.add("us;United States".split(";"));
+ this.knownCountries.add("uy;Uruguay".split(";"));
+ this.knownCountries.add("uz;Uzbekistan".split(";"));
+ this.knownCountries.add("vu;Vanuatu".split(";"));
+ this.knownCountries.add("ve;Venezuela".split(";"));
+ this.knownCountries.add("vn;Vietnam".split(";"));
+ this.knownCountries.add("vg;Virgin Islands, British".split(";"));
+ this.knownCountries.add("vi;Virgin Islands, U.S.".split(";"));
+ this.knownCountries.add("wf;Wallis and Futuna".split(";"));
+ this.knownCountries.add("eh;Western Sahara".split(";"));
+ this.knownCountries.add("ye;Yemen".split(";"));
+ this.knownCountries.add("zm;Zambia".split(";"));
+ this.knownCountries.add("zw;Zimbabwe".split(";"));
+ }
+
+ public List<String[]> getCountryList() {
+ return this.knownCountries;
+ }
+}
+
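Usage sketch (hypothetical caller; getInstance() is the class's pre-existing singleton accessor, used by GraphParameterChecker and GraphsSubpagesServlet below): each list entry is a {code, name} pair.

    List<String[]> countries = Countries.getInstance().getCountryList();
    for (String[] country : countries) {
      // country[0] is the two-letter code, country[1] the display name.
      System.out.println(country[0] + " -> " + country[1]);
    }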
diff --git a/src/org/torproject/ernie/web/graphs/CsvServlet.java b/src/org/torproject/ernie/web/graphs/CsvServlet.java
new file mode 100644
index 0000000..40e3bea
--- /dev/null
+++ b/src/org/torproject/ernie/web/graphs/CsvServlet.java
@@ -0,0 +1,97 @@
+/* Copyright 2011, 2012 The Tor Project
+ * See LICENSE for licensing information */
+package org.torproject.ernie.web.graphs;
+
+import java.io.IOException;
+import java.util.SortedSet;
+import java.util.logging.Logger;
+
+import javax.servlet.ServletException;
+import javax.servlet.http.HttpServlet;
+import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpServletResponse;
+
+/**
+ * Servlet that reads an HTTP request for a comma-separated value file,
+ * asks the RObjectGenerator to generate this file, and returns it to
+ * the client.
+ */
+public class CsvServlet extends HttpServlet {
+
+ private static final long serialVersionUID = 7501442926823719958L;
+
+ private RObjectGenerator rObjectGenerator;
+
+ /* Available CSV files. */
+ private SortedSet<String> availableCsvFiles;
+
+ private Logger logger;
+
+ public void init() {
+
+ /* Initialize logger. */
+ this.logger = Logger.getLogger(CsvServlet.class.toString());
+
+ /* Get a reference to the R object generator that we need to generate
+ * CSV files. */
+ this.rObjectGenerator = (RObjectGenerator) getServletContext().
+ getAttribute("RObjectGenerator");
+ this.availableCsvFiles = rObjectGenerator.getAvailableCsvFiles();
+ }
+
+ public void doGet(HttpServletRequest request,
+ HttpServletResponse response) throws IOException,
+ ServletException {
+
+ /* Check if the directory listing was requested. */
+ String requestURI = request.getRequestURI();
+ if (requestURI.equals("/ernie/csv/")) {
+ request.setAttribute("directory", "/csv");
+ request.setAttribute("extension", ".csv");
+ request.setAttribute("files", this.availableCsvFiles);
+ request.getRequestDispatcher("/WEB-INF/dir.jsp").forward(request,
+ response);
+ return;
+ }
+
+ /* Find out which CSV file was requested and make sure we know this
+ * CSV file type. */
+ String requestedCsvFile = requestURI;
+ if (requestedCsvFile.endsWith(".csv")) {
+ requestedCsvFile = requestedCsvFile.substring(0,
+ requestedCsvFile.length() - ".csv".length());
+ }
+ if (requestedCsvFile.contains("/")) {
+ requestedCsvFile = requestedCsvFile.substring(requestedCsvFile.
+ lastIndexOf("/") + 1);
+ }
+ if (!availableCsvFiles.contains(requestedCsvFile)) {
+ logger.info("Did not recognize requested .csv file from request "
+ + "URI: '" + requestURI + "'. Responding with 404 Not Found.");
+ response.sendError(HttpServletResponse.SC_NOT_FOUND);
+ return;
+ }
+ logger.fine("CSV file '" + requestedCsvFile + ".csv' requested.");
+
+  /* Request CSV file from R object generator, which may ask Rserve to
+   * generate it. */
+ RObject csvFile = this.rObjectGenerator.generateCsv(
+ requestedCsvFile, true);
+
+ /* Make sure that we have a .csv file to return. */
+ if (csvFile == null) {
+ response.sendError(HttpServletResponse.SC_INTERNAL_SERVER_ERROR);
+ return;
+ }
+
+ /* Write CSV file to response. */
+ String csvFileContent = new String(csvFile.getBytes());
+ response.setContentType("text/csv");
+ response.setHeader("Content-Length", String.valueOf(
+ csvFileContent.length()));
+ response.setHeader("Content-Disposition",
+ "inline; filename=\"" + requestedCsvFile + ".csv\"");
+ response.getWriter().print(csvFileContent);
+ }
+}
+
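As a minimal sketch of the URI handling above (hypothetical helper, same algorithm as doGet): a request for "/ernie/csv/networksize.csv" reduces to the key "networksize" before the availableCsvFiles lookup.

    // Hypothetical helper mirroring the parsing in doGet.
    static String csvKeyFromUri(String requestURI) {
      String key = requestURI;
      if (key.endsWith(".csv")) {
        key = key.substring(0, key.length() - ".csv".length());
      }
      if (key.contains("/")) {
        key = key.substring(key.lastIndexOf("/") + 1);
      }
      return key;  // "/ernie/csv/networksize.csv" -> "networksize"
    }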
diff --git a/src/org/torproject/ernie/web/graphs/GraphDataServlet.java b/src/org/torproject/ernie/web/graphs/GraphDataServlet.java
new file mode 100644
index 0000000..0e35bdc
--- /dev/null
+++ b/src/org/torproject/ernie/web/graphs/GraphDataServlet.java
@@ -0,0 +1,279 @@
+/* Copyright 2011, 2012 The Tor Project
+ * See LICENSE for licensing information */
+package org.torproject.ernie.web.graphs;
+
+import java.io.BufferedReader;
+import java.io.IOException;
+import java.io.StringReader;
+import java.text.ParseException;
+import java.text.SimpleDateFormat;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.SortedMap;
+import java.util.SortedSet;
+import java.util.TimeZone;
+import java.util.TreeMap;
+import java.util.TreeSet;
+import java.util.logging.Logger;
+
+import javax.servlet.ServletException;
+import javax.servlet.http.HttpServlet;
+import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpServletResponse;
+
+/**
+ * Servlet that reads an HTTP request for a JSON-formatted graph data
+ * document, asks the RObjectGenerator to generate the CSV file behind it,
+ * converts it to a JSON object, and returns it to the client.
+ */
+public class GraphDataServlet extends HttpServlet {
+
+ private static final long serialVersionUID = 1799558498301134024L;
+
+ private RObjectGenerator rObjectGenerator;
+
+ /* Available graph data files. */
+ private SortedMap<String, String> availableGraphDataFiles;
+
+ /* Variable columns in CSV files that are in long form, not wide. */
+ private SortedMap<String, String> variableColumns;
+
+  /* Value columns in CSV files, set for graphs that include only a
+   * subset of value columns in their results. */
+ private SortedMap<String, String> valueColumns;
+
+ private Logger logger;
+
+ public void init() {
+
+ /* Initialize logger. */
+ this.logger = Logger.getLogger(GraphDataServlet.class.toString());
+
+ /* Initialize map of available graph data files and corresponding CSV
+ * files. */
+ this.availableGraphDataFiles = new TreeMap<String, String>();
+ this.availableGraphDataFiles.put("relays", "networksize");
+ this.availableGraphDataFiles.put("bridges", "networksize");
+ this.availableGraphDataFiles.put("cloudbridges", "cloudbridges");
+ this.availableGraphDataFiles.put("relays-by-country",
+ "relaycountries");
+ this.availableGraphDataFiles.put("relays-by-flags", "relayflags");
+ this.availableGraphDataFiles.put("relays-by-version", "versions");
+ this.availableGraphDataFiles.put("relays-by-platform", "platforms");
+ this.availableGraphDataFiles.put("relay-bandwidth", "bandwidth");
+ this.availableGraphDataFiles.put("relay-dir-bandwidth", "dirbytes");
+ this.availableGraphDataFiles.put("relay-bandwidth-history-by-flags",
+ "bwhist-flags");
+ this.availableGraphDataFiles.put("relay-bandwidth-by-flags",
+ "bandwidth-flags");
+ this.availableGraphDataFiles.put("direct-users-by-country",
+ "direct-users");
+ this.availableGraphDataFiles.put("bridge-users-by-country",
+ "bridge-users");
+ this.availableGraphDataFiles.put("torperf", "torperf");
+
+ /* Initialize map of graphs with specific variable columns. */
+ this.variableColumns = new TreeMap<String, String>();
+ this.variableColumns.put("relays-by-country", "country");
+ this.variableColumns.put("relay-bandwidth-history-by-flags",
+ "isexit,isguard");
+ this.variableColumns.put("torperf", "source");
+
+ /* Initialize map of graphs with specific value columns. */
+ this.valueColumns = new TreeMap<String, String>();
+ this.valueColumns.put("relays", "relays");
+ this.valueColumns.put("bridges", "bridges");
+
+ /* Get a reference to the R object generator that we need to generate
+ * CSV files. */
+ this.rObjectGenerator = (RObjectGenerator) getServletContext().
+ getAttribute("RObjectGenerator");
+ }
+
+ public void doGet(HttpServletRequest request,
+ HttpServletResponse response) throws IOException,
+ ServletException {
+
+ /* Check if the directory listing was requested. */
+ String requestURI = request.getRequestURI();
+ if (requestURI.equals("/ernie/graphs/")) {
+ request.setAttribute("directory", "/graphs");
+ request.setAttribute("extension", "");
+ request.setAttribute("files",
+ this.availableGraphDataFiles.keySet());
+ request.getRequestDispatcher("/WEB-INF/dir.jsp").forward(request,
+ response);
+ return;
+ }
+
+ /* Find out which JSON file was requested and make sure we know this
+ * JSON file type. */
+ String requestedJsonFile = request.getRequestURI();
+ if (requestedJsonFile.contains("/")) {
+ requestedJsonFile = requestedJsonFile.substring(requestedJsonFile.
+ lastIndexOf("/") + 1);
+ }
+ if (!availableGraphDataFiles.containsKey(requestedJsonFile)) {
+ logger.info("Did not recognize requested .csv file from request "
+ + "URI: '" + request.getRequestURI() + "'. Responding with 404 "
+ + "Not Found.");
+ response.sendError(HttpServletResponse.SC_NOT_FOUND);
+ return;
+ }
+ String requestedCsvFile = this.availableGraphDataFiles.get(
+ requestedJsonFile);
+ logger.fine("CSV file '" + requestedCsvFile + ".csv' requested.");
+
+ /* Request CSV file from R object generator, which may ask Rserve to
+ * generate it. */
+ RObject csvFile = this.rObjectGenerator.generateCsv(requestedCsvFile,
+ true);
+
+ /* Make sure that we have a CSV to convert into JSON. */
+ if (csvFile == null) {
+ response.sendError(HttpServletResponse.SC_INTERNAL_SERVER_ERROR);
+ return;
+ }
+
+ /* Look up if we converted this CSV to JSON format before. If not,
+ * convert it now. */
+ String jsonString;
+ if (!this.lastConvertedCsvFile.containsKey(requestedJsonFile) ||
+ this.lastConvertedCsvFile.get(requestedJsonFile) <
+ csvFile.getLastModified()) {
+ jsonString = this.convertCsvToJson(requestedJsonFile,
+ new String(csvFile.getBytes()));
+ this.lastConvertedCsvFile.put(requestedJsonFile,
+ csvFile.getLastModified());
+ this.convertedCsvFiles.put(requestedJsonFile, jsonString);
+ } else {
+ jsonString = this.convertedCsvFiles.get(requestedJsonFile);
+ }
+
+ /* Make sure we have a JSON string to return. */
+ if (jsonString == null) {
+ response.sendError(HttpServletResponse.SC_INTERNAL_SERVER_ERROR);
+ return;
+ }
+
+ /* Write JSON string to response. */
+ response.setHeader("Access-Control-Allow-Origin", "*");
+ response.setContentType("application/json");
+ response.setCharacterEncoding("utf-8");
+ response.getWriter().print(jsonString);
+ }
+
+ private Map<String, Long> lastConvertedCsvFile =
+ new HashMap<String, Long>();
+ private Map<String, String> convertedCsvFiles =
+ new HashMap<String, String>();
+ private String convertCsvToJson(String requestedJsonFile,
+ String csvFileContent) {
+ String jsonString = null;
+ try {
+ BufferedReader br = new BufferedReader(new StringReader(
+ csvFileContent));
+ String line;
+ String[] columns = null;
+ int dateCol = -1;
+ SortedSet<Integer> variableCols = new TreeSet<Integer>();
+ SortedSet<Integer> valueCols = new TreeSet<Integer>();
+ if ((line = br.readLine()) != null) {
+ columns = line.split(",");
+ for (int i = 0; i < columns.length; i++) {
+ if (columns[i].equals("date")) {
+ dateCol = i;
+ } else if (this.variableColumns.containsKey(requestedJsonFile)
+ && this.variableColumns.get(requestedJsonFile).contains(
+ columns[i])) {
+ variableCols.add(i);
+ } else if (!this.valueColumns.containsKey(requestedJsonFile) ||
+ this.valueColumns.get(requestedJsonFile).contains(
+ columns[i])) {
+ valueCols.add(i);
+ }
+ }
+ }
+ if (columns == null || dateCol < 0 || valueCols.isEmpty()) {
+ return null;
+ }
+ SortedMap<String, SortedSet<String>> graphs =
+ new TreeMap<String, SortedSet<String>>();
+ while ((line = br.readLine()) != null) {
+ String[] elements = line.split(",");
+ if (elements.length != columns.length) {
+ return null;
+ }
+ String date = elements[dateCol];
+ String variable = "";
+ if (!variableCols.isEmpty()) {
+ for (int variableCol : variableCols) {
+ String variableString = elements[variableCol];
+ if (variableString.equals("TRUE")) {
+ variable += columns[variableCol] + "_";
+ } else if (variableString.equals("FALSE")) {
+ variable += "not" + columns[variableCol] + "_";
+ } else {
+ variable += variableString + "_";
+ }
+ }
+ }
+ for (int valueCol : valueCols) {
+ if (elements[valueCol].equals("NA")) {
+ continue;
+ }
+ String graphName = variable + columns[valueCol];
+ if (!graphs.containsKey(graphName)) {
+ graphs.put(graphName, new TreeSet<String>());
+ }
+ String dateAndValue = date + "=" + elements[valueCol];
+ graphs.get(graphName).add(dateAndValue);
+ }
+ }
+ StringBuilder sb = new StringBuilder();
+ SimpleDateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd");
+ dateFormat.setTimeZone(TimeZone.getTimeZone("UTC"));
+ for (Map.Entry<String, SortedSet<String>> e : graphs.entrySet()) {
+ String graphName = e.getKey();
+ SortedSet<String> datesAndValues = e.getValue();
+ if (datesAndValues.isEmpty()) {
+ continue;
+ }
+ String[] firstDateAndValue = datesAndValues.first().split("=");
+ String firstDate = firstDateAndValue[0];
+ String lastDate = datesAndValues.last().split("=")[0];
+ sb.append(",\n\"" + graphName + "\":{"
+ + "\"first\":\"" + firstDate + "\","
+ + "\"last\":\"" + lastDate + "\","
+ + "\"values\":[");
+ int written = 0;
+ String previousDate = firstDate;
+ long previousDateMillis = dateFormat.parse(previousDate).
+ getTime();
+ for (String dateAndValue : datesAndValues) {
+          String[] parts = dateAndValue.split("=");
+ String date = parts[0];
+ long dateMillis = dateFormat.parse(date).getTime();
+ String value = parts[1];
+ while (dateMillis - 86400L * 1000L > previousDateMillis) {
+ sb.append((written++ > 0 ? "," : "") + "null");
+ previousDateMillis += 86400L * 1000L;
+ previousDate = dateFormat.format(previousDateMillis);
+ }
+ sb.append((written++ > 0 ? "," : "") + value);
+ previousDate = date;
+ previousDateMillis = dateMillis;
+ }
+ sb.append("]}");
+ }
+ br.close();
+ jsonString = "[" + sb.toString().substring(1) + "\n]";
+ } catch (IOException e) {
+ return null;
+ } catch (ParseException e) {
+ return null;
+ }
+ return jsonString;
+ }
+}
+
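For illustration, a response for /ernie/graphs/relays then looks roughly as follows (dates and values invented); note the null that convertCsvToJson inserts for the missing day between first and last:

    {
    "relays":{"first":"2012-09-01","last":"2012-09-04","values":[3000,3011,null,2996]}
    }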
diff --git a/src/org/torproject/ernie/web/graphs/GraphImageServlet.java b/src/org/torproject/ernie/web/graphs/GraphImageServlet.java
new file mode 100644
index 0000000..b7d0b17
--- /dev/null
+++ b/src/org/torproject/ernie/web/graphs/GraphImageServlet.java
@@ -0,0 +1,76 @@
+/* Copyright 2011, 2012 The Tor Project
+ * See LICENSE for licensing information */
+package org.torproject.ernie.web.graphs;
+
+import java.io.BufferedOutputStream;
+import java.io.IOException;
+
+import javax.servlet.ServletException;
+import javax.servlet.http.HttpServlet;
+import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpServletResponse;
+
+/**
+ * Servlet that reads an HTTP request for a graph image, asks the
+ * RObjectGenerator to generate this graph if it's not in the cache, and
+ * returns the image bytes to the client.
+ */
+public class GraphImageServlet extends HttpServlet {
+
+ private static final long serialVersionUID = -7356818641689744288L;
+
+ private RObjectGenerator rObjectGenerator;
+
+ public void init() {
+
+ /* Get a reference to the R object generator that we need to generate
+ * graph images. */
+ this.rObjectGenerator = (RObjectGenerator) getServletContext().
+ getAttribute("RObjectGenerator");
+ }
+
+ public void doGet(HttpServletRequest request,
+ HttpServletResponse response) throws IOException,
+ ServletException {
+
+ /* Find out which graph type was requested and make sure we know this
+ * graph type and file type. */
+ String requestedGraph = request.getRequestURI();
+ String fileType = null;
+ if (requestedGraph.endsWith(".png") ||
+ requestedGraph.endsWith(".pdf") ||
+ requestedGraph.endsWith(".svg")) {
+ fileType = requestedGraph.substring(requestedGraph.length() - 3);
+ requestedGraph = requestedGraph.substring(0, requestedGraph.length()
+ - 4);
+ }
+ if (requestedGraph.contains("/")) {
+ requestedGraph = requestedGraph.substring(requestedGraph.
+ lastIndexOf("/") + 1);
+ }
+
+ /* Request graph from R object generator, which either returns it from
+ * its cache or asks Rserve to generate it. */
+ RObject graph = rObjectGenerator.generateGraph(requestedGraph,
+ fileType, request.getParameterMap(), true);
+
+ /* Make sure that we have a graph to return. */
+ if (graph == null || graph.getBytes() == null || fileType == null) {
+ response.sendError(HttpServletResponse.SC_INTERNAL_SERVER_ERROR);
+ return;
+ }
+
+ /* Write graph bytes to response. */
+ BufferedOutputStream output = null;
+ response.setContentType("image/" + fileType);
+ response.setHeader("Content-Length",
+ String.valueOf(graph.getBytes().length));
+ response.setHeader("Content-Disposition",
+ "inline; filename=\"" + graph.getFileName() + "\"");
+ output = new BufferedOutputStream(response.getOutputStream(), 1024);
+ output.write(graph.getBytes(), 0, graph.getBytes().length);
+ output.flush();
+ output.close();
+ }
+}
+
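For illustration (hypothetical request), a GET for /ernie/graphs/networksize.png?start=2012-09-01&end=2012-09-30 is split into requestedGraph "networksize" and fileType "png", so the servlet effectively runs:

    RObject graph = rObjectGenerator.generateGraph("networksize", "png",
        request.getParameterMap(), true);
    // graph.getBytes() holds the image; graph.getFileName() is built
    // from the graph name and checked parameter values, e.g.
    // "networksize-2012-09-01-2012-09-30.png".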
diff --git a/src/org/torproject/ernie/web/graphs/GraphParameterChecker.java b/src/org/torproject/ernie/web/graphs/GraphParameterChecker.java
new file mode 100644
index 0000000..c22a845
--- /dev/null
+++ b/src/org/torproject/ernie/web/graphs/GraphParameterChecker.java
@@ -0,0 +1,297 @@
+/* Copyright 2011, 2012 The Tor Project
+ * See LICENSE for licensing information */
+package org.torproject.ernie.web.graphs;
+
+import java.text.ParseException;
+import java.text.SimpleDateFormat;
+import java.util.Arrays;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import java.util.TimeZone;
+import java.util.regex.Pattern;
+
+/**
+ * Checks request parameters passed to graph-generating servlets.
+ */
+public class GraphParameterChecker {
+
+ /**
+ * Singleton instance of this class.
+ */
+ private static GraphParameterChecker instance =
+ new GraphParameterChecker();
+
+ /**
+ * Returns the singleton instance of this class.
+ */
+ public static GraphParameterChecker getInstance() {
+ return instance;
+ }
+
+ /* Date format for parsing start and end dates. */
+ private SimpleDateFormat dateFormat;
+
+ /* Available graphs with corresponding parameter lists. */
+ private Map<String, String> availableGraphs;
+
+ /* Known parameters and parameter values. */
+ private Map<String, String> knownParameterValues;
+
+ /**
+   * Initializes the date format and the map of known parameter values.
+ */
+ public GraphParameterChecker() {
+ this.dateFormat = new SimpleDateFormat("yyyy-MM-dd");
+ this.dateFormat.setTimeZone(TimeZone.getTimeZone("UTC"));
+
+ this.knownParameterValues = new HashMap<String, String>();
+ this.knownParameterValues.put("flag",
+ "Running,Exit,Guard,Fast,Stable");
+ this.knownParameterValues.put("granularity", "day");
+ StringBuilder sb = new StringBuilder("all");
+ for (String[] country : Countries.getInstance().getCountryList()) {
+ sb.append("," + country[0]);
+ }
+ this.knownParameterValues.put("country", sb.toString());
+ this.knownParameterValues.put("events", "on,off,points");
+ this.knownParameterValues.put("language", "all,en,zh_CN,fa");
+ this.knownParameterValues.put("source", "all,siv,moria,torperf");
+ this.knownParameterValues.put("filesize", "50kb,1mb,5mb");
+ }
+
+ public void setAvailableGraphs(Map<String, String> availableGraphs) {
+ this.availableGraphs = availableGraphs;
+ }
+
+ /**
+ * Checks request parameters for the given graph type and returns a map
+ * of recognized parameters, or null if the graph type doesn't exist or
+ * the parameters are invalid.
+ */
+ public Map<String, String[]> checkParameters(String graphType,
+ Map requestParameters) {
+
+ /* Check if the graph type exists. */
+ if (graphType == null ||
+ !this.availableGraphs.containsKey(graphType)) {
+ return null;
+ }
+
+ /* Find out which other parameters are supported by this graph type
+ * and parse them if they are given. */
+ Set<String> supportedGraphParameters = new HashSet<String>(Arrays.
+ asList(this.availableGraphs.get(graphType).split(",")));
+ Map<String, String[]> recognizedGraphParameters =
+ new HashMap<String, String[]>();
+
+ /* Parse start and end dates if supported by the graph type. If no end
+ * date is provided, set it to today. If no start date is provided,
+ * set it to 90 days before the end date. Make sure that start date
+ * precedes end date. */
+ if (supportedGraphParameters.contains("start") ||
+ supportedGraphParameters.contains("end")) {
+ String[] startParameter = (String[]) requestParameters.get("start");
+ String[] endParameter = (String[]) requestParameters.get("end");
+ long endTimestamp = System.currentTimeMillis();
+ if (endParameter != null && endParameter.length > 0 &&
+ endParameter[0].length() > 0) {
+ try {
+ endTimestamp = dateFormat.parse(endParameter[0]).getTime();
+ } catch (ParseException e) {
+ return null;
+ }
+ if (!endParameter[0].startsWith("20")) {
+ return null;
+ }
+ }
+ endParameter = new String[] { dateFormat.format(endTimestamp) };
+ long startTimestamp = endTimestamp - 90L * 24L * 60L * 60L * 1000L;
+ if (startParameter != null && startParameter.length > 0 &&
+ startParameter[0].length() > 0) {
+ try {
+ startTimestamp = dateFormat.parse(startParameter[0]).getTime();
+ } catch (ParseException e) {
+ return null;
+ }
+ if (!startParameter[0].startsWith("20")) {
+ return null;
+ }
+ }
+ startParameter = new String[] { dateFormat.format(startTimestamp) };
+ if (startTimestamp > endTimestamp) {
+ return null;
+ }
+ recognizedGraphParameters.put("start", startParameter);
+ recognizedGraphParameters.put("end", endParameter);
+ }
+
+    /* Parse relay flags if supported by the graph type. If no relay flags
+     * are passed, use the set of all known flags as default; reject
+     * requests containing unknown flags. */
+ if (supportedGraphParameters.contains("flag")) {
+ String[] flagParameters = (String[]) requestParameters.get("flag");
+ List<String> knownFlags = Arrays.asList(
+ this.knownParameterValues.get("flag").split(","));
+ if (flagParameters != null) {
+ for (String flag : flagParameters) {
+ if (flag == null || flag.length() == 0 ||
+ !knownFlags.contains(flag)) {
+ return null;
+ }
+ }
+ } else {
+ flagParameters = this.knownParameterValues.get("flag").split(",");
+ }
+ recognizedGraphParameters.put("flag", flagParameters);
+ }
+
+ /* Parse granularity, which can be 1 day or 1 hour, if supported by
+ * the graph type. The default is 1 day. */
+ if (supportedGraphParameters.contains("granularity")) {
+ String[] granularityParameter = (String[]) requestParameters.get(
+ "granularity");
+ List<String> knownGranularities = Arrays.asList(
+ this.knownParameterValues.get("granularity").split(","));
+ if (granularityParameter != null) {
+ if (granularityParameter.length != 1 ||
+ granularityParameter[0] == null ||
+ !knownGranularities.contains(granularityParameter[0])) {
+ return null;
+ }
+ } else {
+ granularityParameter = new String[] { "day" };
+ }
+ recognizedGraphParameters.put("granularity", granularityParameter);
+ }
+
+ /* Parse country codes if supported by the graph type. If no countries
+ * are passed, use country code "all" (all countries) as default. */
+ if (supportedGraphParameters.contains("country")) {
+ String[] countryParameters = (String[]) requestParameters.get(
+ "country");
+ List<String> knownCountries = Arrays.asList(
+ this.knownParameterValues.get("country").split(","));
+ if (countryParameters != null) {
+ for (String country : countryParameters) {
+ if (country == null || country.length() == 0 ||
+ !knownCountries.contains(country)) {
+ return null;
+ }
+ }
+ } else {
+ countryParameters = new String[] { "all" };
+ }
+ recognizedGraphParameters.put("country", countryParameters);
+ }
+
+    /* Parse whether the estimated min/max range shall be displayed if
+     * supported by the graph type. This parameter can be "on", "off", or
+     * "points", where "off" is the default. */
+ if (supportedGraphParameters.contains("events")) {
+ String[] eventsParameter = (String[]) requestParameters.get(
+ "events");
+ List<String> knownRanges = Arrays.asList(
+ this.knownParameterValues.get("events").split(","));
+ if (eventsParameter != null) {
+ if (eventsParameter.length != 1 ||
+ eventsParameter[0].length() == 0 ||
+ !knownRanges.contains(eventsParameter[0])) {
+ return null;
+ }
+ } else {
+ eventsParameter = new String[] { "off" };
+ }
+ recognizedGraphParameters.put("events", eventsParameter);
+ }
+
+ /* Parse language if supported by the graph type. Only a single
+ * language can be passed. If no language is passed, use "all" as
+ * default. */
+ if (supportedGraphParameters.contains("language")) {
+ String[] languageParameter = (String[]) requestParameters.get(
+ "language");
+ List<String> knownBundles = Arrays.asList(
+ this.knownParameterValues.get("language").split(","));
+ if (languageParameter != null) {
+ if (languageParameter.length != 1 ||
+ languageParameter[0].length() == 0 ||
+ !knownBundles.contains(languageParameter[0])) {
+ return null;
+ }
+ } else {
+ languageParameter = new String[] { "all" };
+ }
+ recognizedGraphParameters.put("language", languageParameter);
+ }
+
+    /* Parse torperf data source if supported by the graph type. Only a
+     * single source can be passed. If no source is passed, use "all" as
+     * default. */
+ if (supportedGraphParameters.contains("source")) {
+ String[] sourceParameter = (String[]) requestParameters.get(
+ "source");
+ List<String> knownSources = Arrays.asList(
+ this.knownParameterValues.get("source").split(","));
+ if (sourceParameter != null) {
+ if (sourceParameter.length != 1) {
+ return null;
+ }
+ if (sourceParameter[0].length() == 0 ||
+ !knownSources.contains(sourceParameter[0])) {
+ return null;
+ }
+ } else {
+ sourceParameter = new String[] { "all" };
+ }
+ recognizedGraphParameters.put("source", sourceParameter);
+ }
+
+ /* Parse torperf file size if supported by the graph type. Only a
+ * single file size can be passed. If no file size is passed, use
+ * "50kb" as default. */
+ if (supportedGraphParameters.contains("filesize")) {
+ String[] filesizeParameter = (String[]) requestParameters.get(
+ "filesize");
+ List<String> knownFilesizes = Arrays.asList(
+ this.knownParameterValues.get("filesize").split(","));
+ if (filesizeParameter != null) {
+ if (filesizeParameter.length != 1) {
+ return null;
+ }
+ if (filesizeParameter[0].length() == 0 ||
+ !knownFilesizes.contains(filesizeParameter[0])) {
+ return null;
+ }
+ } else {
+ filesizeParameter = new String[] { "50kb" };
+ }
+ recognizedGraphParameters.put("filesize", filesizeParameter);
+ }
+
+ /* Parse fingerprint if supported/required by the graph type. Make
+ * sure the fingerprint contains only hexadecimal characters and is 40
+ * characters long. Fail if no fingerprint is provided! */
+ if (supportedGraphParameters.contains("fingerprint")) {
+ String[] fingerprintParameter = (String[]) requestParameters.get(
+ "fingerprint");
+ if (fingerprintParameter == null ||
+ fingerprintParameter.length != 1 ||
+ fingerprintParameter[0] == null ||
+ !Pattern.matches("[0-9a-f]{40}",
+ fingerprintParameter[0].toLowerCase())) {
+ return null;
+ } else {
+ fingerprintParameter[0] = fingerprintParameter[0].toLowerCase();
+ recognizedGraphParameters.put("fingerprint",
+ fingerprintParameter);
+ }
+ }
+
+ /* We now have a map with all required graph parameters. Return it. */
+ return recognizedGraphParameters;
+ }
+}
+
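A minimal usage sketch (hypothetical parameter values; the servlets pass request.getParameterMap(), and setAvailableGraphs() must have been called first, as RObjectGenerator does on startup):

    Map<String, String[]> params = new HashMap<String, String[]>();
    params.put("start", new String[] { "2012-09-01" });
    params.put("country", new String[] { "de" });
    Map<String, String[]> checked = GraphParameterChecker.getInstance()
        .checkParameters("relaycountries", params);
    // checked contains start, the defaulted end (today), and country,
    // or is null if any parameter failed validation.

One caveat: the shared SimpleDateFormat field is not thread-safe, so concurrent requests through this singleton could in principle garble date parsing.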
diff --git a/src/org/torproject/ernie/web/graphs/GraphsSubpagesServlet.java b/src/org/torproject/ernie/web/graphs/GraphsSubpagesServlet.java
new file mode 100644
index 0000000..c522bfa
--- /dev/null
+++ b/src/org/torproject/ernie/web/graphs/GraphsSubpagesServlet.java
@@ -0,0 +1,163 @@
+/* Copyright 2011, 2012 The Tor Project
+ * See LICENSE for licensing information */
+package org.torproject.ernie.web.graphs;
+
+import java.io.IOException;
+import java.text.SimpleDateFormat;
+import java.util.Arrays;
+import java.util.Date;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import java.util.TimeZone;
+
+import javax.servlet.ServletException;
+import javax.servlet.http.HttpServlet;
+import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpServletResponse;
+
+public class GraphsSubpagesServlet extends HttpServlet {
+
+ private static final long serialVersionUID = -5959829347747628403L;
+
+ /* Available graphs subpages with corresponding JSP to which requests
+ * are forwarded. */
+ private Map<String, String> availableGraphsSubpages;
+
+ /* Available tables on graphs subpages. */
+ private Map<String, Set<String>> availableGraphsSubpageTables;
+
+ /* Country codes and names for per-country graphs. */
+ private List<String[]> knownCountries;
+
+ /* R object generator for generating table data. */
+ private RObjectGenerator rObjectGenerator;
+
+ public GraphsSubpagesServlet() {
+ this.availableGraphsSubpages = new HashMap<String, String>();
+ this.availableGraphsSubpages.put("network.html",
+ "WEB-INF/network.jsp");
+ this.availableGraphsSubpages.put("fast-exits.html",
+ "WEB-INF/fast-exits.jsp");
+ this.availableGraphsSubpages.put("users.html", "WEB-INF/users.jsp");
+ this.availableGraphsSubpages.put("packages.html",
+ "WEB-INF/packages.jsp");
+ this.availableGraphsSubpages.put("performance.html",
+ "WEB-INF/performance.jsp");
+
+ this.availableGraphsSubpageTables =
+ new HashMap<String, Set<String>>();
+ this.availableGraphsSubpageTables.put("users.html",
+ new HashSet<String>(Arrays.asList(
+ "direct-users,censorship-events".split(","))));
+
+ this.knownCountries = Countries.getInstance().getCountryList();
+ }
+
+ public void init() {
+ /* Get a reference to the R object generator that we need to generate
+ * table data. */
+ this.rObjectGenerator = (RObjectGenerator) getServletContext().
+ getAttribute("RObjectGenerator");
+ }
+
+ public void doGet(HttpServletRequest request,
+ HttpServletResponse response) throws IOException, ServletException {
+
+ /* Find out which graph subpage was requested and look up which JSP
+ * handles this subpage. */
+ String requestedPage = request.getRequestURI();
+ if (requestedPage == null) {
+ response.sendError(HttpServletResponse.SC_BAD_REQUEST);
+ return;
+ }
+ if (requestedPage.contains("/")) {
+ requestedPage = requestedPage.substring(requestedPage.
+ lastIndexOf("/") + 1);
+ }
+ if (!availableGraphsSubpages.containsKey(requestedPage)) {
+ response.sendError(HttpServletResponse.SC_INTERNAL_SERVER_ERROR);
+ return;
+ }
+ String jsp = availableGraphsSubpages.get(requestedPage);
+
+ /* Find out which graph or table type was requested, if any. */
+ String requestedGraph = request.getParameter("graph");
+ String requestedTable = request.getParameter("table");
+ if (requestedGraph != null) {
+
+ /* Check if the passed parameters are valid. */
+ Map<String, String[]> checkedParameters = GraphParameterChecker.
+ getInstance().checkParameters(requestedGraph,
+ request.getParameterMap());
+ if (checkedParameters != null) {
+
+ /* Set the graph's attributes to the appropriate values, so that
+ * we can display the correct graph and prepopulate the form. */
+ StringBuilder urlBuilder = new StringBuilder();
+ for (Map.Entry<String, String[]> param :
+ checkedParameters.entrySet()) {
+ request.setAttribute(requestedGraph.replaceAll("-", "_") + "_"
+ + param.getKey(), param.getValue());
+ for (String paramValue : param.getValue()) {
+ urlBuilder.append("&" + param.getKey() + "=" + paramValue);
+ }
+ }
+ String url = "?" + urlBuilder.toString().substring(1);
+ request.setAttribute(requestedGraph.replaceAll("-", "_") + "_url",
+ url);
+ }
+ }
+ if (requestedTable != null) {
+
+ /* Check if the passed parameters are valid. */
+ Map<String, String[]> checkedParameters = TableParameterChecker.
+ getInstance().checkParameters(requestedTable,
+ request.getParameterMap());
+ if (checkedParameters != null) {
+
+ /* Set the table's attributes to the appropriate values, so that
+ * we can prepopulate the form. */
+ for (Map.Entry<String, String[]> param :
+ checkedParameters.entrySet()) {
+ request.setAttribute(requestedTable.replaceAll("-", "_") + "_"
+ + param.getKey(), param.getValue());
+ }
+ }
+ }
+
+ /* Generate table data if the graphs subpage has any tables,
+ * regardless of whether a table update was requested, and add the
+ * table data as request attribute. */
+ if (this.availableGraphsSubpageTables.containsKey(requestedPage)) {
+ for (String tableName :
+ this.availableGraphsSubpageTables.get(requestedPage)) {
+ List<Map<String, String>> tableData = rObjectGenerator.
+ generateTable(tableName, requestedTable,
+ request.getParameterMap(), true);
+ request.setAttribute(tableName.replaceAll("-", "_")
+ + "_tabledata", tableData);
+ }
+ }
+
+ /* Pass list of known countries in case we want to display them. */
+ request.setAttribute("countries", this.knownCountries);
+
+ /* Pass the default start and end dates. */
+ SimpleDateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd");
+ dateFormat.setTimeZone(TimeZone.getTimeZone("UTC"));
+ Date defaultEndDate = new Date();
+ Date defaultStartDate = new Date(defaultEndDate.getTime()
+ - 90L * 24L * 60L * 60L * 1000L);
+ request.setAttribute("default_start_date",
+ dateFormat.format(defaultStartDate));
+ request.setAttribute("default_end_date",
+ dateFormat.format(defaultEndDate));
+
+ /* Forward the request to the JSP that does all the hard work. */
+ request.getRequestDispatcher(jsp).forward(request, response);
+ }
+}
+
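As an example of the attribute naming (hypothetical request, and map iteration order may vary):

    // GET /users.html?graph=direct-users&country=de
    // After parameter checking, users.jsp sees attributes like:
    //   direct_users_country = new String[] { "de" }
    //   direct_users_events  = new String[] { "off" }  // default
    //   direct_users_url     = "?start=...&end=...&country=de&events=off"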
diff --git a/src/org/torproject/ernie/web/graphs/RObject.java b/src/org/torproject/ernie/web/graphs/RObject.java
new file mode 100644
index 0000000..cfab819
--- /dev/null
+++ b/src/org/torproject/ernie/web/graphs/RObject.java
@@ -0,0 +1,23 @@
+/* Copyright 2011, 2012 The Tor Project
+ * See LICENSE for licensing information */
+package org.torproject.ernie.web.graphs;
+
+public class RObject {
+ private byte[] bytes;
+ private String fileName;
+ private long lastModified;
+ public RObject(byte[] bytes, String fileName, long lastModified) {
+ this.bytes = bytes;
+ this.fileName = fileName;
+ this.lastModified = lastModified;
+ }
+ public String getFileName() {
+ return this.fileName;
+ }
+ public byte[] getBytes() {
+ return this.bytes;
+ }
+ public long getLastModified() {
+ return this.lastModified;
+ }
+}
diff --git a/src/org/torproject/ernie/web/graphs/RObjectGenerator.java b/src/org/torproject/ernie/web/graphs/RObjectGenerator.java
new file mode 100644
index 0000000..f678ff4
--- /dev/null
+++ b/src/org/torproject/ernie/web/graphs/RObjectGenerator.java
@@ -0,0 +1,394 @@
+/* Copyright 2011, 2012 The Tor Project
+ * See LICENSE for licensing information */
+package org.torproject.ernie.web.graphs;
+
+import java.io.BufferedInputStream;
+import java.io.BufferedReader;
+import java.io.ByteArrayInputStream;
+import java.io.ByteArrayOutputStream;
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.IOException;
+import java.io.InputStreamReader;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import java.util.SortedSet;
+import java.util.TreeSet;
+
+import javax.servlet.ServletContext;
+import javax.servlet.ServletContextEvent;
+import javax.servlet.ServletContextListener;
+
+import org.rosuda.REngine.Rserve.RConnection;
+import org.rosuda.REngine.Rserve.RserveException;
+
+public class RObjectGenerator implements ServletContextListener {
+
+ /* Host and port where Rserve is listening. */
+ private String rserveHost;
+ private int rservePort;
+
+ /* Some parameters for our cache of graph images. */
+ private String cachedGraphsDirectory;
+ private long maxCacheAge;
+
+ private SortedSet<String> availableCsvFiles;
+ private Map<String, String> availableTables;
+ private Map<String, String> availableGraphs;
+ private Set<String> availableGraphFileTypes;
+
+ public void contextInitialized(ServletContextEvent event) {
+
+ /* Initialize using context parameters. */
+ ServletContext servletContext = event.getServletContext();
+ this.rserveHost = servletContext.getInitParameter("rserveHost");
+ this.rservePort = Integer.parseInt(servletContext.getInitParameter(
+ "rservePort"));
+ this.maxCacheAge = Long.parseLong(servletContext.getInitParameter(
+ "maxCacheAge"));
+ this.cachedGraphsDirectory = servletContext.getInitParameter(
+ "cachedGraphsDir");
+
+ /* Initialize map of available CSV files. */
+ this.availableCsvFiles = new TreeSet<String>();
+ this.availableCsvFiles.add("bandwidth");
+ this.availableCsvFiles.add("bandwidth-flags");
+ this.availableCsvFiles.add("bridge-users");
+ this.availableCsvFiles.add("bwhist-flags");
+ this.availableCsvFiles.add("connbidirect");
+ this.availableCsvFiles.add("cloudbridges");
+ this.availableCsvFiles.add("direct-users");
+ this.availableCsvFiles.add("dirreq-stats");
+ this.availableCsvFiles.add("dirbytes");
+ this.availableCsvFiles.add("monthly-users-average");
+ this.availableCsvFiles.add("monthly-users-peak");
+ this.availableCsvFiles.add("networksize");
+ this.availableCsvFiles.add("platforms");
+ this.availableCsvFiles.add("relaycountries");
+ this.availableCsvFiles.add("relayflags");
+ this.availableCsvFiles.add("torperf");
+ this.availableCsvFiles.add("torperf-failures");
+ this.availableCsvFiles.add("versions");
+
+ this.availableTables = new HashMap<String, String>();
+ this.availableTables.put("direct-users", "start,end,filename");
+ this.availableTables.put("censorship-events", "start,end,filename");
+ TableParameterChecker.getInstance().setAvailableTables(
+ availableTables);
+
+ this.availableGraphs = new HashMap<String, String>();
+ this.availableGraphs.put("networksize", "start,end,filename");
+ this.availableGraphs.put("cloudbridges", "start,end,filename");
+ this.availableGraphs.put("relaycountries",
+ "start,end,country,filename");
+ this.availableGraphs.put("relayflags", "start,end,flag,granularity,"
+ + "filename");
+ this.availableGraphs.put("versions", "start,end,filename");
+ this.availableGraphs.put("platforms", "start,end,filename");
+ this.availableGraphs.put("bandwidth", "start,end,filename");
+ this.availableGraphs.put("bandwidth-flags", "start,end,filename");
+ this.availableGraphs.put("bwhist-flags", "start,end,filename");
+ this.availableGraphs.put("dirbytes", "start,end,filename");
+ this.availableGraphs.put("direct-users",
+ "start,end,country,events,filename");
+ this.availableGraphs.put("bridge-users",
+ "start,end,country,filename");
+ this.availableGraphs.put("torperf",
+ "start,end,source,filesize,filename");
+ this.availableGraphs.put("torperf-failures",
+ "start,end,source,filesize,filename");
+ this.availableGraphs.put("connbidirect", "start,end,filename");
+ this.availableGraphs.put("fast-exits", "start,end,filename");
+ this.availableGraphs.put("almost-fast-exits", "start,end,filename");
+ this.availableGraphFileTypes = new HashSet<String>(Arrays.asList(
+ "png,pdf,svg".split(",")));
+ GraphParameterChecker.getInstance().setAvailableGraphs(
+ availableGraphs);
+
+ /* Register ourself, so that servlets can use us. */
+ servletContext.setAttribute("RObjectGenerator", this);
+
+ /* Periodically generate R objects with default parameters. */
+ new Thread() {
+ public void run() {
+ long lastUpdated = 0L, sleep;
+ while (true) {
+ while ((sleep = maxCacheAge * 1000L / 2L + lastUpdated
+ - System.currentTimeMillis()) > 0L) {
+ try {
+ Thread.sleep(sleep);
+ } catch (InterruptedException e) {
+ }
+ }
+ for (String csvFile : availableCsvFiles) {
+ generateCsv(csvFile, false);
+ }
+ for (String tableName : availableTables.keySet()) {
+ generateTable(tableName, tableName, new HashMap(), false);
+ }
+ for (String graphName : availableGraphs.keySet()) {
+ for (String fileType : availableGraphFileTypes) {
+ generateGraph(graphName, fileType, new HashMap(), false);
+ }
+ }
+ lastUpdated = System.currentTimeMillis();
+ }
+      }
+ }.start();
+ }
+
+ public void contextDestroyed(ServletContextEvent event) {
+ /* Nothing to do. */
+ }
+
+ public RObject generateGraph(String requestedGraph, String fileType,
+ Map parameterMap, boolean checkCache) {
+ Map<String, String[]> checkedParameters = GraphParameterChecker.
+ getInstance().checkParameters(requestedGraph, parameterMap);
+ if (checkedParameters == null) {
+ /* TODO We're going to take the blame by sending an internal server
+ * error to the client, but really the user is to blame. */
+ return null;
+ }
+ StringBuilder rQueryBuilder = new StringBuilder("plot_"
+ + requestedGraph.replaceAll("-", "_") + "("),
+ imageFilenameBuilder = new StringBuilder(requestedGraph);
+ for (Map.Entry<String, String[]> parameter :
+ checkedParameters.entrySet()) {
+ String parameterName = parameter.getKey();
+ String[] parameterValues = parameter.getValue();
+ for (String param : parameterValues) {
+ imageFilenameBuilder.append("-" + param);
+ }
+ if (parameterValues.length < 2) {
+ rQueryBuilder.append(parameterName + " = '" + parameterValues[0]
+ + "', ");
+ } else {
+ rQueryBuilder.append(parameterName + " = c(");
+ for (int i = 0; i < parameterValues.length - 1; i++) {
+ rQueryBuilder.append("'" + parameterValues[i] + "', ");
+ }
+ rQueryBuilder.append("'" + parameterValues[
+ parameterValues.length - 1] + "'), ");
+ }
+ }
+ imageFilenameBuilder.append("." + fileType);
+ String imageFilename = imageFilenameBuilder.toString();
+ rQueryBuilder.append("path = '%s')");
+ String rQuery = rQueryBuilder.toString();
+ File imageFile = new File(this.cachedGraphsDirectory + "/"
+ + imageFilename);
+ return this.generateRObject(rQuery, imageFile, imageFilename,
+ checkCache);
+ }
+
+ public SortedSet<String> getAvailableCsvFiles() {
+ return this.availableCsvFiles;
+ }
+
+ public RObject generateCsv(String requestedCsvFile,
+ boolean checkCache) {
+ /* Prepare filename and R query string. */
+ String rQuery = "export_" + requestedCsvFile.replaceAll("-", "_")
+ + "(path = '%s')";
+ String csvFilename = requestedCsvFile + ".csv";
+
+ /* See if we need to generate this .csv file. */
+ File csvFile = new File(this.cachedGraphsDirectory + "/"
+ + csvFilename);
+ return this.generateRObject(rQuery, csvFile, csvFilename, checkCache);
+ }
+
+ public List<Map<String, String>> generateTable(String tableName,
+ String requestedTable, Map parameterMap, boolean checkCache) {
+
+ Map<String, String[]> checkedParameters = null;
+ if (tableName.equals(requestedTable)) {
+ checkedParameters = TableParameterChecker.
+ getInstance().checkParameters(requestedTable,
+ parameterMap);
+ } else {
+ checkedParameters = TableParameterChecker.
+ getInstance().checkParameters(tableName, null);
+ }
+ if (checkedParameters == null) {
+ /* TODO We're going to take the blame by sending an internal server
+ * error to the client, but really the user is to blame. */
+ return null;
+ }
+ StringBuilder rQueryBuilder = new StringBuilder("write_"
+ + tableName.replaceAll("-", "_") + "("),
+ tableFilenameBuilder = new StringBuilder(tableName);
+
+ for (Map.Entry<String, String[]> parameter :
+ checkedParameters.entrySet()) {
+ String parameterName = parameter.getKey();
+ String[] parameterValues = parameter.getValue();
+ for (String param : parameterValues) {
+ tableFilenameBuilder.append("-" + param);
+ }
+ if (parameterValues.length < 2) {
+ rQueryBuilder.append(parameterName + " = '"
+ + parameterValues[0] + "', ");
+ } else {
+ rQueryBuilder.append(parameterName + " = c(");
+ for (int i = 0; i < parameterValues.length - 1; i++) {
+ rQueryBuilder.append("'" + parameterValues[i] + "', ");
+ }
+ rQueryBuilder.append("'" + parameterValues[
+ parameterValues.length - 1] + "'), ");
+ }
+ }
+ tableFilenameBuilder.append(".tbl");
+ String tableFilename = tableFilenameBuilder.toString();
+ rQueryBuilder.append("path = '%s')");
+ String rQuery = rQueryBuilder.toString();
+ return this.generateTable(rQuery, tableFilename, checkCache);
+ }
+
+ /* Generate table data using the given R query and filename or read
+ * previously generated table data from disk if it's not too old and
+ * return table data. */
+ private List<Map<String, String>> generateTable(String rQuery,
+ String tableFilename, boolean checkCache) {
+
+ /* See if we need to generate this table. */
+ File tableFile = new File(this.cachedGraphsDirectory + "/"
+ + tableFilename);
+    RObject tableObject = this.generateRObject(rQuery, tableFile,
+        tableFilename, checkCache);
+    if (tableObject == null) {
+      /* Generating the table file failed; avoid a NullPointerException
+       * below. */
+      return null;
+    }
+    byte[] tableBytes = tableObject.getBytes();
+
+ /* Write the table content to a map. */
+ List<Map<String, String>> result = null;
+ try {
+ result = new ArrayList<Map<String, String>>();
+ BufferedReader br = new BufferedReader(new InputStreamReader(
+ new ByteArrayInputStream(tableBytes)));
+ String line = br.readLine();
+ if (line != null) {
+ List<String> headers = new ArrayList<String>(Arrays.asList(
+ line.split(",")));
+ while ((line = br.readLine()) != null) {
+ String[] parts = line.split(",");
+ if (headers.size() != parts.length) {
+ return null;
+ }
+ Map<String, String> row = new HashMap<String, String>();
+ for (int i = 0; i < headers.size(); i++) {
+ row.put(headers.get(i), parts[i]);
+ }
+ result.add(row);
+ }
+ }
+ } catch (IOException e) {
+ return null;
+ }
+
+ /* Return table values. */
+ return result;
+ }
+
+ /* Generate an R object in a separate worker thread, or wait for an
+ * already running worker thread to finish and get its result. */
+ private RObject generateRObject(String rQuery, File rObjectFile,
+ String fileName, boolean checkCache) {
+ RObjectGeneratorWorker worker = null;
+ synchronized (this.rObjectGeneratorThreads) {
+ if (this.rObjectGeneratorThreads.containsKey(rQuery)) {
+ worker = this.rObjectGeneratorThreads.get(rQuery);
+ } else {
+ worker = new RObjectGeneratorWorker(rQuery, rObjectFile,
+ fileName, checkCache);
+ this.rObjectGeneratorThreads.put(rQuery, worker);
+ worker.start();
+ }
+ }
+ try {
+ worker.join();
+ } catch (InterruptedException e) {
+ }
+ synchronized (this.rObjectGeneratorThreads) {
+ if (this.rObjectGeneratorThreads.containsKey(rQuery) &&
+ this.rObjectGeneratorThreads.get(rQuery) == worker) {
+ this.rObjectGeneratorThreads.remove(rQuery);
+ }
+ }
+ return worker.getRObject();
+ }
+
+ private Map<String, RObjectGeneratorWorker> rObjectGeneratorThreads =
+ new HashMap<String, RObjectGeneratorWorker>();
+
+ private class RObjectGeneratorWorker extends Thread {
+
+ private String rQuery;
+ private File rObjectFile;
+ private String fileName;
+ private boolean checkCache;
+ private RObject result = null;
+
+ public RObjectGeneratorWorker(String rQuery, File rObjectFile,
+ String fileName, boolean checkCache) {
+ this.rQuery = rQuery;
+ this.rObjectFile = rObjectFile;
+ this.fileName = fileName;
+ this.checkCache = checkCache;
+ }
+
+ public void run() {
+
+ /* See if we need to generate this R object. */
+ long now = System.currentTimeMillis();
+ if (!this.checkCache || !this.rObjectFile.exists() ||
+ this.rObjectFile.lastModified() < now - maxCacheAge * 1000L) {
+
+ /* We do. Update the R query to contain the absolute path to the
+ * file to be generated, create a connection to Rserve, run the R
+ * query, and close the connection. The generated object will be
+ * on disk. */
+ this.rQuery = String.format(this.rQuery,
+ this.rObjectFile.getAbsolutePath());
+ try {
+ RConnection rc = new RConnection(rserveHost, rservePort);
+ rc.eval(this.rQuery);
+ rc.close();
+ } catch (RserveException e) {
+ return;
+ }
+
+ /* Check that we really just generated the R object. */
+ if (!this.rObjectFile.exists() || this.rObjectFile.lastModified()
+ < now - maxCacheAge * 1000L) {
+ return;
+ }
+ }
+
+ /* Read the R object from disk and write it to a byte array. */
+ long lastModified = this.rObjectFile.lastModified();
+ try {
+ BufferedInputStream bis = new BufferedInputStream(
+ new FileInputStream(this.rObjectFile), 1024);
+ ByteArrayOutputStream baos = new ByteArrayOutputStream();
+ byte[] buffer = new byte[1024];
+ int length;
+ while ((length = bis.read(buffer)) > 0) {
+ baos.write(buffer, 0, length);
+ }
+ bis.close();
+ this.result = new RObject(baos.toByteArray(), this.fileName,
+ lastModified);
+ } catch (IOException e) {
+ return;
+ }
+ }
+
+ public RObject getRObject() {
+ return this.result;
+ }
+ }
+}
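The freshness rule shared by the prefetch thread and the worker can be summarized as this hypothetical helper (same test as in run() above); the background thread additionally re-runs roughly every maxCacheAge/2 seconds so that objects with default parameters stay warm:

    // An R object is served from the cache iff its file exists and is
    // newer than maxCacheAge seconds; otherwise Rserve regenerates it.
    static boolean isFresh(File f, long maxCacheAgeSeconds, long now) {
      return f.exists()
          && f.lastModified() >= now - maxCacheAgeSeconds * 1000L;
    }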
diff --git a/src/org/torproject/ernie/web/graphs/TableParameterChecker.java b/src/org/torproject/ernie/web/graphs/TableParameterChecker.java
new file mode 100644
index 0000000..02a0c85
--- /dev/null
+++ b/src/org/torproject/ernie/web/graphs/TableParameterChecker.java
@@ -0,0 +1,120 @@
+/* Copyright 2011, 2012 The Tor Project
+ * See LICENSE for licensing information */
+package org.torproject.ernie.web.graphs;
+
+import java.text.ParseException;
+import java.text.SimpleDateFormat;
+import java.util.Arrays;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.Map;
+import java.util.Set;
+import java.util.TimeZone;
+
+/**
+ * Checks request parameters used to generate tables.
+ */
+public class TableParameterChecker {
+
+ /**
+ * Singleton instance of this class.
+ */
+ private static TableParameterChecker instance =
+ new TableParameterChecker();
+
+ /**
+ * Returns the singleton instance of this class.
+ */
+ public static TableParameterChecker getInstance() {
+ return instance;
+ }
+
+ /* Date format for parsing start and end dates. */
+ private SimpleDateFormat dateFormat;
+
+ /* Available tables with corresponding parameter lists. */
+ private Map<String, String> availableTables;
+
+ /**
+   * Initializes the date format used for parsing start and end dates.
+ */
+ public TableParameterChecker() {
+ this.dateFormat = new SimpleDateFormat("yyyy-MM-dd");
+ this.dateFormat.setTimeZone(TimeZone.getTimeZone("UTC"));
+ }
+
+ public void setAvailableTables(Map<String, String> availableTables) {
+ this.availableTables = availableTables;
+ }
+
+ /**
+ * Checks request parameters for the given table type and returns a map
+ * of recognized parameters, or null if the table type doesn't exist or
+ * the parameters are invalid.
+ */
+ public Map<String, String[]> checkParameters(String tableType,
+ Map requestParameters) {
+
+ /* Check if the table type exists. */
+ if (tableType == null ||
+ !this.availableTables.containsKey(tableType)) {
+ return null;
+ }
+
+ /* Find out which other parameters are supported by this table type
+ * and parse them if they are given. */
+ Set<String> supportedTableParameters = new HashSet<String>(Arrays.
+ asList(this.availableTables.get(tableType).split(",")));
+ Map<String, String[]> recognizedTableParameters =
+ new HashMap<String, String[]>();
+
+    /* Parse start and end dates if supported by the table type. If no
+     * end date is provided, set it to today; if no start date is
+     * provided, set it to 90 days before the end date. Only dates in
+     * years 20xx are accepted, and start must not be later than end. */
+ if (supportedTableParameters.contains("start") ||
+ supportedTableParameters.contains("end")) {
+ String[] startParameter = null;
+ String[] endParameter = null;
+ if (requestParameters != null) {
+ startParameter = (String[]) requestParameters.get("start");
+ endParameter = (String[]) requestParameters.get("end");
+ }
+ long endTimestamp = System.currentTimeMillis();
+ if (endParameter != null && endParameter.length > 0 &&
+ endParameter[0].length() > 0) {
+ try {
+ endTimestamp = dateFormat.parse(endParameter[0]).getTime();
+ } catch (ParseException e) {
+ return null;
+ }
+ if (!endParameter[0].startsWith("20")) {
+ return null;
+ }
+ }
+ endParameter = new String[] { dateFormat.format(endTimestamp) };
+ long startTimestamp = endTimestamp - 90L * 24L * 60L * 60L * 1000L;
+ if (startParameter != null && startParameter.length > 0 &&
+ startParameter[0].length() > 0) {
+ try {
+ startTimestamp = dateFormat.parse(startParameter[0]).getTime();
+ } catch (ParseException e) {
+ return null;
+ }
+ if (!startParameter[0].startsWith("20")) {
+ return null;
+ }
+ }
+ startParameter = new String[] { dateFormat.format(startTimestamp) };
+ if (startTimestamp > endTimestamp) {
+ return null;
+ }
+ recognizedTableParameters.put("start", startParameter);
+ recognizedTableParameters.put("end", endParameter);
+ }
+
+ /* We now have a map with all required table parameters. Return it. */
+ return recognizedTableParameters;
+ }
+}
+
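
A usage sketch for TableParameterChecker; the table name
"direct-users" and its "start,end" parameter list are hypothetical
stand-ins for whatever the web application registers via
setAvailableTables(). With only a start date supplied, the end date
defaults to today:

  import java.util.HashMap;
  import java.util.Map;

  import org.torproject.ernie.web.graphs.TableParameterChecker;

  public class TableParameterCheckerDemo {
    public static void main(String[] args) {
      TableParameterChecker checker =
          TableParameterChecker.getInstance();
      /* Hypothetical table type supporting start and end. */
      Map<String, String> availableTables =
          new HashMap<String, String>();
      availableTables.put("direct-users", "start,end");
      checker.setAvailableTables(availableTables);

      /* Simulated request parameters with only a start date. */
      Map<String, String[]> requestParameters =
          new HashMap<String, String[]>();
      requestParameters.put("start", new String[] { "2012-09-01" });
      Map<String, String[]> checked =
          checker.checkParameters("direct-users", requestParameters);

      /* checked is null for unknown tables or invalid dates. */
      if (checked != null) {
        System.out.println(checked.get("start")[0] + " to "
            + checked.get("end")[0]);
      }
    }
  }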
diff --git a/src/org/torproject/ernie/web/research/ResearchDataServlet.java b/src/org/torproject/ernie/web/research/ResearchDataServlet.java
new file mode 100644
index 0000000..6c24e0d
--- /dev/null
+++ b/src/org/torproject/ernie/web/research/ResearchDataServlet.java
@@ -0,0 +1,260 @@
+/* Copyright 2011, 2012 The Tor Project
+ * See LICENSE for licensing information */
+package org.torproject.ernie.web.research;
+
+import java.io.File;
+import java.io.IOException;
+import java.text.ParseException;
+import java.text.SimpleDateFormat;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Date;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.SortedMap;
+import java.util.TreeMap;
+
+import javax.servlet.ServletException;
+import javax.servlet.http.HttpServlet;
+import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpServletResponse;
+
+/**
+ * Controller servlet for the Data page. Prepares the various lists of
+ * downloadable metrics data files by parsing a file with URLs on other
+ * servers and by looking at a local directory with files served by the
+ * local Apache HTTP server. The file with URLs on other servers may
+ * contain comment lines starting with #. Metrics data file types are
+ * recognized from the file names.
+ */
+public class ResearchDataServlet extends HttpServlet {
+
+ private static final long serialVersionUID = -5168280373350515577L;
+
+ public void doGet(HttpServletRequest request,
+ HttpServletResponse response) throws IOException, ServletException {
+
+ /* Read local directory with files served by the local Apache HTTP
+ * server and add the URLs to the list. */
+ List<String> dataFileUrls = new ArrayList<String>();
+ String localDataDir = getServletConfig().getInitParameter(
+ "localDataDir");
+ if (localDataDir != null) {
+ try {
+ File localDataDirFile = new File(localDataDir);
+ if (localDataDirFile.exists() && localDataDirFile.isDirectory()) {
+ for (File localDataFile : localDataDirFile.listFiles()) {
+ if (!localDataFile.isDirectory()) {
+ dataFileUrls.add("/data/" + localDataFile.getName());
+ }
+ }
+ }
+ } catch (SecurityException e) {
+ /* We're not permitted to read the directory with metrics data
+ * files. Ignore. */
+ }
+ }
+
+ /* Prepare data structures that we're going to pass to the JSP. All
+ * data structures are (nested) maps with the map keys being used for
+ * displaying the files in tables and map values being 2-element
+ * arrays containing the file url and optional signature file. */
+ SortedMap<Date, Map<String, String[]>> relayDescriptors =
+ new TreeMap<Date, Map<String, String[]>>(
+ java.util.Collections.reverseOrder());
+ String[] certs = new String[2];
+ SortedMap<Date, String[]> bridgeDescriptors =
+ new TreeMap<Date, String[]>(java.util.Collections.reverseOrder());
+ String[] relayStatistics = new String[2];
+ SortedMap<Date, String[]> torperfTarballs =
+ new TreeMap<Date, String[]>(java.util.Collections.reverseOrder());
+ SortedMap<String, Map<String, String[]>> torperfData =
+ new TreeMap<String, Map<String, String[]>>();
+ SortedMap<Date, String[]> exitLists =
+ new TreeMap<Date, String[]>(java.util.Collections.reverseOrder());
+ SortedMap<Date, String[]> torperfExperiments =
+ new TreeMap<Date, String[]>();
+ SortedMap<Date, String[]> bridgePoolAssignments =
+ new TreeMap<Date, String[]>(java.util.Collections.reverseOrder());
+
+ /* Prepare rewriting Torperf sources. */
+ Map<String, String> torperfSources = new HashMap<String, String>();
+ torperfSources.put("torperffast", "torperf, fastest");
+ torperfSources.put("torperffastratio", "torperf, best ratio");
+ torperfSources.put("torperfslow", "torperf, slowest");
+ torperfSources.put("torperfslowratio", "torperf, worst ratio");
+
+ /* Go through the file list, decide for each file what metrics data
+ * type it is, and put it in the appropriate map. */
+ SimpleDateFormat monthFormat = new SimpleDateFormat("yyyy-MM");
+ SimpleDateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd");
+ List<String> torperfFilesizes = Arrays.asList("50kb,1mb,5mb".
+ split(","));
+ for (String url : dataFileUrls) {
+ if (!url.contains("/")) {
+ continue;
+ }
+ String filename = url.substring(url.lastIndexOf("/") + 1);
+
+ /* URL contains relay descriptors. */
+ if (filename.startsWith("tor-20") ||
+ filename.startsWith("statuses-20") ||
+ filename.startsWith("server-descriptors-20") ||
+ filename.startsWith("extra-infos-20") ||
+ filename.startsWith("votes-20") ||
+ filename.startsWith("consensuses-20")) {
+ String type = filename.substring(0, filename.indexOf("-20"));
+ String yearMonth = filename.substring(filename.indexOf("20"));
+ yearMonth = yearMonth.substring(0, 7);
+ Date month = null;
+ try {
+ month = monthFormat.parse(yearMonth);
+ } catch (ParseException e) {
+ /* Ignore this URL. */
+ continue;
+ }
+ int index = filename.endsWith(".asc") ? 1 : 0;
+ if (!relayDescriptors.containsKey(month)) {
+ relayDescriptors.put(month, new HashMap<String, String[]>());
+ }
+ if (!relayDescriptors.get(month).containsKey(type)) {
+ relayDescriptors.get(month).put(type, new String[2]);
+ }
+ relayDescriptors.get(month).get(type)[index] = url;
+
+ /* URL contains v3 certificates. */
+ } else if (filename.startsWith("certs.tar")) {
+ int index = filename.endsWith(".asc") ? 1 : 0;
+ certs[index] = url;
+
+ /* URL contains bridge descriptors. */
+ } else if (filename.startsWith("bridge-descriptors-20")) {
+ String yearMonth = filename.substring(filename.indexOf("20"));
+ yearMonth = yearMonth.substring(0, 7);
+ Date month = null;
+ try {
+ month = monthFormat.parse(yearMonth);
+ } catch (ParseException e) {
+ /* Ignore this URL. */
+ continue;
+ }
+ int index = filename.endsWith(".asc") ? 1 : 0;
+ if (!bridgeDescriptors.containsKey(month)) {
+ bridgeDescriptors.put(month, new String[2]);
+ }
+ bridgeDescriptors.get(month)[index] = url;
+
+ /* URL contains relay statistics. */
+ } else if (filename.startsWith("relay-statistics.tar.bz2")) {
+ int index = filename.endsWith(".asc") ? 1 : 0;
+ relayStatistics[index] = url;
+
+ /* URL contains Torperf tarball. */
+ } else if (filename.startsWith("torperf-20")) {
+ String yearMonth = filename.substring(filename.indexOf("20"));
+ yearMonth = yearMonth.substring(0, 7);
+ Date month = null;
+ try {
+ month = monthFormat.parse(yearMonth);
+ } catch (ParseException e) {
+ /* Ignore this URL. */
+ continue;
+ }
+ if (!torperfTarballs.containsKey(month)) {
+ torperfTarballs.put(month, new String[2]);
+ }
+ torperfTarballs.get(month)[0] = url;
+
+ /* URL contains Torperf data file. */
+ } else if (filename.endsWith("b.data") ||
+ filename.endsWith("b.extradata")) {
+ boolean isExtraData = filename.endsWith("b.extradata");
+ String[] parts = filename.split("-");
+ if (parts.length != 2) {
+ continue;
+ }
+ String source = parts[0];
+ if (torperfSources.containsKey(source)) {
+ source = torperfSources.get(source);
+ }
+ String filesize = parts[1];
+ filesize = filesize.substring(0, filesize.length()
+            - (isExtraData ? 10 : 5)); /* strip ".data"/".extradata" */
+ if (!torperfFilesizes.contains(filesize)) {
+ continue;
+ }
+ if (!torperfData.containsKey(source)) {
+ torperfData.put(source, new HashMap<String, String[]>());
+ }
+ if (!torperfData.get(source).containsKey(filesize)) {
+ torperfData.get(source).put(filesize, new String[2]);
+ }
+ torperfData.get(source).get(filesize)[isExtraData ? 1 : 0] = url;
+
+ /* URL contains Torperf experiment tarball. */
+ } else if (filename.startsWith("torperf-experiment-20")) {
+ String dateString = filename.substring(filename.indexOf("20"));
+ dateString = dateString.substring(0, 10);
+ Date date = null;
+ try {
+ date = dateFormat.parse(dateString);
+ } catch (ParseException e) {
+ /* Ignore this URL. */
+ continue;
+ }
+ if (!torperfExperiments.containsKey(date)) {
+ torperfExperiments.put(date, new String[2]);
+ }
+ torperfExperiments.get(date)[0] = url;
+
+ /* URL contains exit list. */
+ } else if (filename.startsWith("exit-list-20")) {
+ String yearMonth = filename.substring(filename.indexOf("20"));
+ yearMonth = yearMonth.substring(0, 7);
+ Date month = null;
+ try {
+ month = monthFormat.parse(yearMonth);
+ } catch (ParseException e) {
+ /* Ignore this URL. */
+ continue;
+ }
+ if (!exitLists.containsKey(month)) {
+ exitLists.put(month, new String[2]);
+ }
+ exitLists.get(month)[0] = url;
+
+ /* URL contains bridge pool assignments. */
+ } else if (filename.startsWith("bridge-pool-assignments-20")) {
+ String yearMonth = filename.substring(filename.indexOf("20"));
+ yearMonth = yearMonth.substring(0, 7);
+ Date month = null;
+ try {
+ month = monthFormat.parse(yearMonth);
+ } catch (ParseException e) {
+ /* Ignore this URL. */
+ continue;
+ }
+ if (!bridgePoolAssignments.containsKey(month)) {
+ bridgePoolAssignments.put(month, new String[2]);
+ }
+ bridgePoolAssignments.get(month)[0] = url;
+ }
+ }
+
+ /* Add the maps to the request and forward it to the JSP to display
+ * the page. */
+ request.setAttribute("relayDescriptors", relayDescriptors);
+ request.setAttribute("certs", certs);
+ request.setAttribute("bridgeDescriptors", bridgeDescriptors);
+ request.setAttribute("relayStatistics", relayStatistics);
+ request.setAttribute("torperfData", torperfData);
+ request.setAttribute("exitLists", exitLists);
+ request.setAttribute("torperfTarballs", torperfTarballs);
+ request.setAttribute("torperfExperiments", torperfExperiments);
+ request.setAttribute("bridgePoolAssignments", bridgePoolAssignments);
+ request.getRequestDispatcher("WEB-INF/data.jsp").forward(request,
+ response);
+ }
+}
+
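
Most branches of the recognition logic in ResearchDataServlet follow
the same date-bucketing idiom: take seven characters starting at the
first "20" in the file name as the year-month, and use a trailing
".asc" to distinguish detached signatures (index 1) from data files
(index 0). A small sketch with illustrative file names:

  import java.text.ParseException;
  import java.text.SimpleDateFormat;
  import java.util.Date;

  public class FilenameBucketDemo {
    public static void main(String[] args) throws ParseException {
      SimpleDateFormat monthFormat = new SimpleDateFormat("yyyy-MM");
      /* Illustrative file names; real ones come from the local data
       * directory. */
      String[] filenames = { "consensuses-2012-11.tar.bz2",
          "bridge-descriptors-2012-11.tar.bz2.asc" };
      for (String filename : filenames) {
        /* Take "yyyy-MM" starting at the first "20" in the name. */
        String yearMonth = filename.substring(filename.indexOf("20"));
        yearMonth = yearMonth.substring(0, 7);
        Date month = monthFormat.parse(yearMonth);
        /* ".asc" marks a detached signature, stored at index 1. */
        int index = filename.endsWith(".asc") ? 1 : 0;
        System.out.println(monthFormat.format(month) + " -> "
            + filename + " (index " + index + ")");
      }
    }
  }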
diff --git a/src/org/torproject/ernie/web/research/ResearchFormatsServlet.java b/src/org/torproject/ernie/web/research/ResearchFormatsServlet.java
new file mode 100644
index 0000000..3b70ca3
--- /dev/null
+++ b/src/org/torproject/ernie/web/research/ResearchFormatsServlet.java
@@ -0,0 +1,24 @@
+/* Copyright 2011, 2012 The Tor Project
+ * See LICENSE for licensing information */
+package org.torproject.ernie.web.research;
+
+import java.io.IOException;
+
+import javax.servlet.ServletException;
+import javax.servlet.http.HttpServlet;
+import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpServletResponse;
+
+public class ResearchFormatsServlet extends HttpServlet {
+
+ private static final long serialVersionUID = 5666493868675314116L;
+
+ public void doGet(HttpServletRequest request,
+ HttpServletResponse response) throws IOException, ServletException {
+
+ /* Forward the request to the JSP that does all the hard work. */
+ request.getRequestDispatcher("WEB-INF/formats.jsp").forward(request,
+ response);
+ }
+}
+
diff --git a/src/org/torproject/ernie/web/research/ResearchPapersServlet.java b/src/org/torproject/ernie/web/research/ResearchPapersServlet.java
new file mode 100644
index 0000000..a63eef0
--- /dev/null
+++ b/src/org/torproject/ernie/web/research/ResearchPapersServlet.java
@@ -0,0 +1,24 @@
+/* Copyright 2011, 2012 The Tor Project
+ * See LICENSE for licensing information */
+package org.torproject.ernie.web.research;
+
+import java.io.IOException;
+
+import javax.servlet.ServletException;
+import javax.servlet.http.HttpServlet;
+import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpServletResponse;
+
+public class ResearchPapersServlet extends HttpServlet {
+
+ private static final long serialVersionUID = -8135459207158536268L;
+
+ public void doGet(HttpServletRequest request,
+ HttpServletResponse response) throws IOException, ServletException {
+
+ /* Forward the request to the JSP that does all the hard work. */
+ request.getRequestDispatcher("WEB-INF/papers.jsp").forward(request,
+ response);
+ }
+}
+
diff --git a/src/org/torproject/ernie/web/research/ResearchToolsServlet.java b/src/org/torproject/ernie/web/research/ResearchToolsServlet.java
new file mode 100644
index 0000000..173a1da
--- /dev/null
+++ b/src/org/torproject/ernie/web/research/ResearchToolsServlet.java
@@ -0,0 +1,24 @@
+/* Copyright 2011, 2012 The Tor Project
+ * See LICENSE for licensing information */
+package org.torproject.ernie.web.research;
+
+import java.io.IOException;
+
+import javax.servlet.ServletException;
+import javax.servlet.http.HttpServlet;
+import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpServletResponse;
+
+public class ResearchToolsServlet extends HttpServlet {
+
+ private static final long serialVersionUID = -3344204426180358872L;
+
+ public void doGet(HttpServletRequest request,
+ HttpServletResponse response) throws IOException, ServletException {
+
+ /* Forward the request to the JSP that does all the hard work. */
+ request.getRequestDispatcher("WEB-INF/tools.jsp").forward(request,
+ response);
+ }
+}
+
_______________________________________________
tor-commits mailing list
tor-commits@xxxxxxxxxxxxxxxxxxxx
https://lists.torproject.org/cgi-bin/mailman/listinfo/tor-commits