[tor-commits] [metrics-web/master] Update Java package structure of website classes.
commit bd8adfa21554696be51ce16e4cfa60fb8ba939cb
Author: Karsten Loesing <karsten.loesing@xxxxxxx>
Date: Tue Jan 21 17:27:14 2014 +0100
Update Java package structure of website classes.
- org.torproject.ernie.status.doctor -> org.torproject.metrics.web.status
- org.torproject.ernie.status.exonerator -> org.torproject.metrics.web.status
- org.torproject.ernie.status.relaysearch -> org.torproject.metrics.web.status
- org.torproject.ernie.web -> org.torproject.metrics.web
- org.torproject.ernie.web.graphs -> org.torproject.metrics.web.graphs
- org.torproject.ernie.web.research -> org.torproject.metrics.web.research
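In practice each of these moves relocates a source file and rewrites its package declaration; a minimal sketch of how one relocated class header presumably reads after this commit, using ConsensusHealthServlet as the example (the class body is assumed to be carried over unchanged apart from the package line; the actual added-file hunks follow the deletions in the full patch):

/* Sketch only; assumes the body is identical to the deleted
 * org.torproject.ernie.status.doctor.ConsensusHealthServlet. */
package org.torproject.metrics.web.status;  // was org.torproject.ernie.status.doctor

import javax.servlet.http.HttpServlet;

public class ConsensusHealthServlet extends HttpServlet {
  /* doGet() and the rest of the class unchanged */
}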
---
website/etc/web.xml | 34 +-
.../status/doctor/ConsensusHealthServlet.java | 57 ---
.../ernie/status/exonerator/ExoneraTorServlet.java | 24 -
.../status/relaysearch/RelaySearchServlet.java | 535 --------------------
.../org/torproject/ernie/web/GraphsServlet.java | 24 -
.../src/org/torproject/ernie/web/IndexServlet.java | 24 -
.../org/torproject/ernie/web/ResearchServlet.java | 24 -
.../org/torproject/ernie/web/StatusServlet.java | 24 -
.../ernie/web/graphs/BubblesServlet.java | 24 -
.../org/torproject/ernie/web/graphs/Countries.java | 285 -----------
.../torproject/ernie/web/graphs/CsvServlet.java | 97 ----
.../ernie/web/graphs/GraphImageServlet.java | 76 ---
.../ernie/web/graphs/GraphParameterChecker.java | 280 ----------
.../ernie/web/graphs/GraphsSubpagesServlet.java | 162 ------
.../org/torproject/ernie/web/graphs/RObject.java | 23 -
.../ernie/web/graphs/RObjectGenerator.java | 398 ---------------
.../ernie/web/graphs/TableParameterChecker.java | 120 -----
.../ernie/web/research/ResearchDataServlet.java | 260 ----------
.../ernie/web/research/ResearchFormatsServlet.java | 24 -
.../ernie/web/research/ResearchPapersServlet.java | 24 -
.../ernie/web/research/ResearchStatsServlet.java | 132 -----
.../ernie/web/research/ResearchToolsServlet.java | 24 -
.../org/torproject/metrics/web/GraphsServlet.java | 24 +
.../org/torproject/metrics/web/IndexServlet.java | 24 +
.../torproject/metrics/web/ResearchServlet.java | 24 +
.../org/torproject/metrics/web/StatusServlet.java | 24 +
.../metrics/web/graphs/BubblesServlet.java | 24 +
.../torproject/metrics/web/graphs/Countries.java | 285 +++++++++++
.../torproject/metrics/web/graphs/CsvServlet.java | 97 ++++
.../metrics/web/graphs/GraphImageServlet.java | 76 +++
.../metrics/web/graphs/GraphParameterChecker.java | 280 ++++++++++
.../metrics/web/graphs/GraphsSubpagesServlet.java | 162 ++++++
.../org/torproject/metrics/web/graphs/RObject.java | 23 +
.../metrics/web/graphs/RObjectGenerator.java | 398 +++++++++++++++
.../metrics/web/graphs/TableParameterChecker.java | 120 +++++
.../metrics/web/research/ResearchDataServlet.java | 260 ++++++++++
.../web/research/ResearchFormatsServlet.java | 24 +
.../web/research/ResearchPapersServlet.java | 24 +
.../metrics/web/research/ResearchStatsServlet.java | 132 +++++
.../metrics/web/research/ResearchToolsServlet.java | 24 +
.../metrics/web/status/ConsensusHealthServlet.java | 57 +++
.../metrics/web/status/ExoneraTorServlet.java | 24 +
.../metrics/web/status/RelaySearchServlet.java | 535 ++++++++++++++++++++
43 files changed, 2658 insertions(+), 2658 deletions(-)
diff --git a/website/etc/web.xml b/website/etc/web.xml
index 992de93..f85d500 100644
--- a/website/etc/web.xml
+++ b/website/etc/web.xml
@@ -9,7 +9,7 @@
<servlet>
<servlet-name>Index</servlet-name>
<servlet-class>
- org.torproject.ernie.web.IndexServlet
+ org.torproject.metrics.web.IndexServlet
</servlet-class>
</servlet>
<servlet-mapping>
@@ -20,7 +20,7 @@
<servlet>
<servlet-name>Graphs</servlet-name>
<servlet-class>
- org.torproject.ernie.web.GraphsServlet
+ org.torproject.metrics.web.GraphsServlet
</servlet-class>
</servlet>
<servlet-mapping>
@@ -31,7 +31,7 @@
<servlet>
<servlet-name>GraphsSubpages</servlet-name>
<servlet-class>
- org.torproject.ernie.web.graphs.GraphsSubpagesServlet
+ org.torproject.metrics.web.graphs.GraphsSubpagesServlet
</servlet-class>
</servlet>
<servlet-mapping>
@@ -54,7 +54,7 @@
<servlet>
<servlet-name>Bubbles</servlet-name>
<servlet-class>
- org.torproject.ernie.web.graphs.BubblesServlet
+ org.torproject.metrics.web.graphs.BubblesServlet
</servlet-class>
</servlet>
<servlet-mapping>
@@ -65,7 +65,7 @@
<servlet>
<servlet-name>Research</servlet-name>
<servlet-class>
- org.torproject.ernie.web.ResearchServlet
+ org.torproject.metrics.web.ResearchServlet
</servlet-class>
</servlet>
<servlet-mapping>
@@ -76,7 +76,7 @@
<servlet>
<servlet-name>ResearchData</servlet-name>
<servlet-class>
- org.torproject.ernie.web.research.ResearchDataServlet
+ org.torproject.metrics.web.research.ResearchDataServlet
</servlet-class>
<init-param>
<param-name>localDataDir</param-name>
@@ -93,7 +93,7 @@
<servlet>
<servlet-name>ResearchPapers</servlet-name>
<servlet-class>
- org.torproject.ernie.web.research.ResearchPapersServlet
+ org.torproject.metrics.web.research.ResearchPapersServlet
</servlet-class>
</servlet>
<servlet-mapping>
@@ -104,7 +104,7 @@
<servlet>
<servlet-name>ResearchTools</servlet-name>
<servlet-class>
- org.torproject.ernie.web.research.ResearchToolsServlet
+ org.torproject.metrics.web.research.ResearchToolsServlet
</servlet-class>
</servlet>
<servlet-mapping>
@@ -115,7 +115,7 @@
<servlet>
<servlet-name>ResearchFormats</servlet-name>
<servlet-class>
- org.torproject.ernie.web.research.ResearchFormatsServlet
+ org.torproject.metrics.web.research.ResearchFormatsServlet
</servlet-class>
</servlet>
<servlet-mapping>
@@ -126,7 +126,7 @@
<servlet>
<servlet-name>ResearchStats</servlet-name>
<servlet-class>
- org.torproject.ernie.web.research.ResearchStatsServlet
+ org.torproject.metrics.web.research.ResearchStatsServlet
</servlet-class>
<init-param>
<param-name>statsDir</param-name>
@@ -144,7 +144,7 @@
<servlet>
<servlet-name>Status</servlet-name>
<servlet-class>
- org.torproject.ernie.web.StatusServlet
+ org.torproject.metrics.web.StatusServlet
</servlet-class>
</servlet>
<servlet-mapping>
@@ -155,7 +155,7 @@
<servlet>
<servlet-name>RelaySearch</servlet-name>
<servlet-class>
- org.torproject.ernie.status.relaysearch.RelaySearchServlet
+ org.torproject.metrics.web.status.RelaySearchServlet
</servlet-class>
</servlet>
<servlet-mapping>
@@ -166,7 +166,7 @@
<servlet>
<servlet-name>GraphImage</servlet-name>
<servlet-class>
- org.torproject.ernie.web.graphs.GraphImageServlet
+ org.torproject.metrics.web.graphs.GraphImageServlet
</servlet-class>
</servlet>
<servlet-mapping>
@@ -233,7 +233,7 @@
<servlet>
<servlet-name>Csv</servlet-name>
<servlet-class>
- org.torproject.ernie.web.graphs.CsvServlet
+ org.torproject.metrics.web.graphs.CsvServlet
</servlet-class>
</servlet>
<servlet-mapping>
@@ -244,7 +244,7 @@
<servlet>
<servlet-name>ExoneraTor</servlet-name>
<servlet-class>
- org.torproject.ernie.status.exonerator.ExoneraTorServlet
+ org.torproject.metrics.web.status.ExoneraTorServlet
</servlet-class>
</servlet>
<servlet-mapping>
@@ -255,7 +255,7 @@
<servlet>
<servlet-name>ConsensusHealthServlet</servlet-name>
<servlet-class>
- org.torproject.ernie.status.doctor.ConsensusHealthServlet
+ org.torproject.metrics.web.status.ConsensusHealthServlet
</servlet-class>
</servlet>
<servlet-mapping>
@@ -312,7 +312,7 @@
<listener>
<listener-class>
- org.torproject.ernie.web.graphs.RObjectGenerator
+ org.torproject.metrics.web.graphs.RObjectGenerator
</listener-class>
</listener>
diff --git a/website/src/org/torproject/ernie/status/doctor/ConsensusHealthServlet.java b/website/src/org/torproject/ernie/status/doctor/ConsensusHealthServlet.java
deleted file mode 100644
index 330708f..0000000
--- a/website/src/org/torproject/ernie/status/doctor/ConsensusHealthServlet.java
+++ /dev/null
@@ -1,57 +0,0 @@
-/* Copyright 2011, 2012 The Tor Project
- * See LICENSE for licensing information */
-package org.torproject.ernie.status.doctor;
-
-import java.io.BufferedInputStream;
-import java.io.BufferedOutputStream;
-import java.io.File;
-import java.io.FileInputStream;
-import java.io.IOException;
-
-import javax.servlet.ServletException;
-import javax.servlet.http.HttpServlet;
-import javax.servlet.http.HttpServletRequest;
-import javax.servlet.http.HttpServletResponse;
-
-public class ConsensusHealthServlet extends HttpServlet {
-
- private static final long serialVersionUID = -5230032733057814869L;
-
- public void doGet(HttpServletRequest request,
- HttpServletResponse response) throws IOException,
- ServletException {
-
- /* Read file from disk and write it to response. */
- BufferedInputStream input = null;
- BufferedOutputStream output = null;
- try {
- File f = new File("/srv/metrics.torproject.org/ernie/website/"
- + "consensus-health.html");
- if (!f.exists()) {
- response.sendError(HttpServletResponse.SC_NOT_FOUND);
- return;
- }
- response.setContentType(this.getServletContext().getMimeType(f.getName()));
- response.setHeader("Content-Length", String.valueOf(
- f.length()));
- response.setHeader("Content-Disposition",
- "inline; filename=\"" + f.getName() + "\"");
- input = new BufferedInputStream(new FileInputStream(f),
- 1024);
- output = new BufferedOutputStream(response.getOutputStream(), 1024);
- byte[] buffer = new byte[1024];
- int length;
- while ((length = input.read(buffer)) > 0) {
- output.write(buffer, 0, length);
- }
- } finally {
- if (output != null) {
- output.close();
- }
- if (input != null) {
- input.close();
- }
- }
- }
-}
-
diff --git a/website/src/org/torproject/ernie/status/exonerator/ExoneraTorServlet.java b/website/src/org/torproject/ernie/status/exonerator/ExoneraTorServlet.java
deleted file mode 100644
index d37b9a8..0000000
--- a/website/src/org/torproject/ernie/status/exonerator/ExoneraTorServlet.java
+++ /dev/null
@@ -1,24 +0,0 @@
-/* Copyright 2011, 2012 The Tor Project
- * See LICENSE for licensing information */
-package org.torproject.ernie.status.exonerator;
-
-import java.io.IOException;
-
-import javax.servlet.ServletException;
-import javax.servlet.http.HttpServlet;
-import javax.servlet.http.HttpServletRequest;
-import javax.servlet.http.HttpServletResponse;
-
-public class ExoneraTorServlet extends HttpServlet {
-
- private static final long serialVersionUID = -6227541092325776626L;
-
- public void doGet(HttpServletRequest request,
- HttpServletResponse response) throws IOException, ServletException {
-
- /* Forward the request to the JSP that does all the hard work. */
- request.getRequestDispatcher("WEB-INF/exonerator.jsp").forward(
- request, response);
- }
-}
-
diff --git a/website/src/org/torproject/ernie/status/relaysearch/RelaySearchServlet.java b/website/src/org/torproject/ernie/status/relaysearch/RelaySearchServlet.java
deleted file mode 100644
index b7c8291..0000000
--- a/website/src/org/torproject/ernie/status/relaysearch/RelaySearchServlet.java
+++ /dev/null
@@ -1,535 +0,0 @@
-/* Copyright 2011, 2012 The Tor Project
- * See LICENSE for licensing information */
-package org.torproject.ernie.status.relaysearch;
-
-import java.io.IOException;
-import java.io.UnsupportedEncodingException;
-import java.math.BigInteger;
-import java.sql.Connection;
-import java.sql.ResultSet;
-import java.sql.SQLException;
-import java.sql.Statement;
-import java.text.ParseException;
-import java.text.SimpleDateFormat;
-import java.util.ArrayList;
-import java.util.Calendar;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.SortedMap;
-import java.util.SortedSet;
-import java.util.TimeZone;
-import java.util.TreeMap;
-import java.util.TreeSet;
-import java.util.logging.Level;
-import java.util.logging.Logger;
-import java.util.regex.Pattern;
-
-import javax.naming.Context;
-import javax.naming.InitialContext;
-import javax.naming.NamingException;
-import javax.servlet.ServletException;
-import javax.servlet.http.HttpServlet;
-import javax.servlet.http.HttpServletRequest;
-import javax.servlet.http.HttpServletResponse;
-import javax.sql.DataSource;
-
-import org.apache.commons.codec.binary.Base64;
-
-/**
- * Web page that allows users to search for relays in the descriptor
- * archives.
- *
- * Possible search terms for testing:
- * - gabelmoo
- * - gabelmoo 2010-09
- * - gabelmoo 2010-09-18
- * - gabelmoo $F2044413DAC2E02E3D6BCF4735A19BCA1DE97281
- * - gabelmoo 80.190.246
- * - gabelmoo $F2044413DAC2E02E3D6BCF4735A19BCA1DE97281 80.190.246
- * - 5898549205 dc737cc9dca16af6 79.212.74.45
- * - 5898549205 dc737cc9dca16af6
- * - 80.190.246.100
- * - $F2044413DAC2E02E3D6BCF4735A19BCA1DE97281
- * - $F2044413DAC2E02E3D6BCF4735A19BCA1DE97281 80.190.246
- * - 58985492
- * - 58985492 79.212.74.45
- */
-public class RelaySearchServlet extends HttpServlet {
-
- private static final long serialVersionUID = -1772662230310611805L;
-
- private Pattern alphaNumDotDashDollarSpacePattern =
- Pattern.compile("[A-Za-z0-9\\.\\-$ ]+");
-
- private Pattern numPattern = Pattern.compile("[0-9]+");
-
- private Pattern hexPattern = Pattern.compile("[A-Fa-f0-9]+");
-
- private Pattern alphaNumPattern = Pattern.compile("[A-Za-z0-9]+");
-
- private SimpleDateFormat dayFormat = new SimpleDateFormat("yyyy-MM-dd");
-
- private SimpleDateFormat monthFormat = new SimpleDateFormat("yyyy-MM");
-
- private SimpleDateFormat dateTimeFormat =
- new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
-
- private long minValidAfterMillis;
-
- private DataSource ds;
-
- private Logger logger;
-
- public void init() {
-
- /* Initialize logger. */
- this.logger = Logger.getLogger(RelaySearchServlet.class.toString());
-
- /* Initialize date format parsers. */
- dayFormat.setTimeZone(TimeZone.getTimeZone("UTC"));
- monthFormat.setTimeZone(TimeZone.getTimeZone("UTC"));
- dateTimeFormat.setTimeZone(TimeZone.getTimeZone("UTC"));
-
- /* Look up data source. */
- try {
- Context cxt = new InitialContext();
- this.ds = (DataSource) cxt.lookup("java:comp/env/jdbc/tordir");
- this.logger.info("Successfully looked up data source.");
- } catch (NamingException e) {
- this.logger.log(Level.WARNING, "Could not look up data source", e);
- }
-
- /* Look up first consensus in the database. */
- try {
- long requestedConnection = System.currentTimeMillis();
- Connection conn = this.ds.getConnection();
- String query = "SELECT MIN(validafter) AS first FROM consensus";
- Statement statement = conn.createStatement();
- ResultSet rs = statement.executeQuery(query);
- if (rs.next()) {
- this.minValidAfterMillis = rs.getTimestamp(1).getTime();
- }
- rs.close();
- statement.close();
- conn.close();
- this.logger.info("Returned a database connection to the pool "
- + "after " + (System.currentTimeMillis()
- - requestedConnection) + " millis.");
- } catch (SQLException e) {
- this.logger.log(Level.WARNING, "Could not look up first consensus "
- + "valid-after time in the database.", e);
- }
- }
-
- public void doGet(HttpServletRequest request,
- HttpServletResponse response) throws IOException,
- ServletException {
-
- /* Read search parameter. If we don't have a search parameter, we're
- * done here. */
- String searchParameter = request.getParameter("search");
- if (searchParameter == null || searchParameter.length() == 0) {
- request.getRequestDispatcher("WEB-INF/relay-search.jsp").forward(
- request, response);
- return;
- }
-
- /* Parse search parameter to identify what nickname, fingerprint,
- * and/or IP address to search for. A valid query contains no more
- * than one identifier for each of the fields. As a special case,
- * there are search terms consisting of 8 to 19 hex characters that
- * can be either a nickname or a fingerprint. */
- String searchNickname = "";
- String searchFingerprint = "";
- String searchIPAddress = "";
- SortedSet<String> searchDays = new TreeSet<String>();
- SortedSet<String> searchMonths = new TreeSet<String>();
- SortedSet<Long> searchDayTimestamps = new TreeSet<Long>();
- SortedSet<Long> searchMonthTimestamps = new TreeSet<Long>();
- boolean validQuery = false;
-
- /* Only parse search parameter if it contains nothing else than
- * alphanumeric characters, dots, and spaces. */
- if (alphaNumDotDashDollarSpacePattern.matcher(searchParameter).
- matches()) {
- SortedSet<String> searchTerms = new TreeSet<String>();
- if (searchParameter.trim().contains(" ")) {
- String[] split = searchParameter.trim().split(" ");
- for (int i = 0; i < split.length; i++) {
- if (split[i].length() > 0) {
- searchTerms.add(split[i]);
- }
- }
- } else {
- searchTerms.add(searchParameter.trim());
- }
-
- /* Parse each search term separately. */
- for (String searchTerm : searchTerms) {
-
- /* If the search term contains a dot, it can only be an IP
- * address. */
- if (searchTerm.contains(".") && !searchTerm.startsWith(".")) {
- String[] octets = searchTerm.split("\\.");
- if (searchIPAddress.length() > 0 || octets.length < 2 ||
- octets.length > 4) {
- validQuery = false;
- break;
- }
- boolean invalidOctet = false;
- StringBuilder sb = new StringBuilder();
- for (int i = 0; i < octets.length; i++) {
- if (!numPattern.matcher(octets[i]).matches() ||
- octets[i].length() > 3 ||
- Integer.parseInt(octets[i]) > 255) {
- invalidOctet = true;
- break;
- } else {
- sb.append("." + Integer.parseInt(octets[i]));
- }
- }
- if (invalidOctet) {
- validQuery = false;
- break;
- }
- if (octets.length < 4) {
- sb.append(".");
- }
- searchIPAddress = sb.toString().substring(1);
- validQuery = true;
- }
-
- /* If the search term contains hyphens, it must be a month or a
- * day. */
- else if (searchTerm.contains("-") &&
- searchTerm.startsWith("20")) {
- try {
- if (searchTerm.length() == 10) {
- searchDayTimestamps.add(dayFormat.parse(searchTerm).
- getTime());
- searchDays.add(searchTerm);
- } else if (searchTerm.length() == 7) {
- searchMonthTimestamps.add(monthFormat.parse(searchTerm).
- getTime());
- searchMonths.add(searchTerm);
- } else {
- validQuery = false;
- break;
- }
- } catch (ParseException e) {
- validQuery = false;
- break;
- }
- }
-
- /* If the search term starts with a $ followed by 8 to 40 hex
- * characters, it must be a fingerprint. */
- else if ((searchTerm.length() >= 9 && searchTerm.length() <= 41 &&
- searchTerm.startsWith("$") &&
- hexPattern.matcher(searchTerm.substring(1)).matches()) ||
- (searchTerm.length() > 19 && searchTerm.length() <= 40 &&
- !searchTerm.startsWith("$") &&
- hexPattern.matcher(searchTerm).matches())) {
- if (searchFingerprint.length() > 0) {
- validQuery = false;
- break;
- }
- searchFingerprint = searchTerm.substring(
- (searchTerm.startsWith("$") ? 1 : 0));
- validQuery = true;
- }
-
- /* If the search term contains up to 19 alphanumerical characters,
- * it must be a nickname. */
- else if (searchTerm.length() <= 19 &&
- alphaNumPattern.matcher(searchTerm).matches()) {
- if (searchNickname.length() > 0) {
- validQuery = false;
- break;
- }
- searchNickname = searchTerm;
- validQuery = true;
- }
-
- /* We didn't recognize this search term. */
- else {
- validQuery = false;
- break;
- }
- }
- }
-
- /* We only accept at most one month or three days, but not both, or
- * people could accidentally keep the database busy. */
- if (searchDays.size() > 3 || searchMonths.size() > 1 ||
- (searchMonths.size() == 1 && searchDays.size() > 0)) {
- validQuery = false;
- }
-
- /* If the query is invalid, stop here. */
- if (!validQuery) {
- request.setAttribute("invalidQuery", "Query is invalid.");
- request.getRequestDispatcher("WEB-INF/relay-search.jsp").
- forward(request, response);
- return;
- }
-
- /* Look up last consensus in the database. */
- long maxValidAfterMillis = -1L;
- try {
- long requestedConnection = System.currentTimeMillis();
- Connection conn = this.ds.getConnection();
- String query = "SELECT MAX(validafter) AS last FROM consensus";
- Statement statement = conn.createStatement();
- ResultSet rs = statement.executeQuery(query);
- if (rs.next()) {
- maxValidAfterMillis = rs.getTimestamp(1).getTime();
- }
- rs.close();
- statement.close();
- conn.close();
- this.logger.info("Returned a database connection to the pool "
- + "after " + (System.currentTimeMillis()
- - requestedConnection) + " millis.");
- } catch (SQLException e) {
- this.logger.log(Level.WARNING, "Could not look up last consensus "
- + "valid-after time in the database.", e);
- }
-
- /* Prepare a string that says what we're searching for. */
- List<String> recognizedSearchTerms = new ArrayList<String>();
- if (searchNickname.length() > 0) {
- recognizedSearchTerms.add("nickname <b>" + searchNickname + "</b>");
- }
- if (searchFingerprint.length() > 0) {
- recognizedSearchTerms.add("fingerprint <b>" + searchFingerprint
- + "</b>");
- }
- if (searchIPAddress.length() > 0) {
- recognizedSearchTerms.add("IP address <b>" + searchIPAddress
- + "</b>");
- }
- List<String> recognizedIntervals = new ArrayList<String>();
- for (String searchTerm : searchMonths) {
- recognizedIntervals.add("in <b>" + searchTerm + "</b>");
- }
- for (String searchTerm : searchDays) {
- recognizedIntervals.add("on <b>" + searchTerm + "</b>");
- }
- StringBuilder searchNoticeBuilder = new StringBuilder();
- if (maxValidAfterMillis > 0L) {
- searchNoticeBuilder.append("Most recent consensus in database is "
- + "from " + dateTimeFormat.format(maxValidAfterMillis)
- + ".</p><p>");
- }
- searchNoticeBuilder.append("Searching for relays with ");
- if (recognizedSearchTerms.size() == 1) {
- searchNoticeBuilder.append(recognizedSearchTerms.get(0));
- } else if (recognizedSearchTerms.size() == 2) {
- searchNoticeBuilder.append(recognizedSearchTerms.get(0) + " and "
- + recognizedSearchTerms.get(1));
- } else {
- for (int i = 0; i < recognizedSearchTerms.size() - 1; i++) {
- searchNoticeBuilder.append(recognizedSearchTerms.get(i) + ", ");
- }
- searchNoticeBuilder.append("and " + recognizedSearchTerms.get(
- recognizedSearchTerms.size() - 1));
- }
- if (recognizedIntervals.size() == 1) {
- searchNoticeBuilder.append(" running "
- + recognizedIntervals.get(0));
- } else if (recognizedIntervals.size() == 2) {
- searchNoticeBuilder.append(" running " + recognizedIntervals.get(0)
- + " and/or " + recognizedIntervals.get(1));
- } else if (recognizedIntervals.size() > 2) {
- searchNoticeBuilder.append(" running ");
- for (int i = 0; i < recognizedIntervals.size() - 1; i++) {
- searchNoticeBuilder.append(recognizedIntervals.get(i) + ", ");
- }
- searchNoticeBuilder.append("and/or " + recognizedIntervals.get(
- recognizedIntervals.size() - 1));
- }
- searchNoticeBuilder.append(" ...");
- String searchNotice = searchNoticeBuilder.toString();
- request.setAttribute("searchNotice", searchNotice);
-
- /* Prepare the query string. */
- StringBuilder conditionBuilder = new StringBuilder();
- boolean addAnd = false;
- if (searchNickname.length() > 0) {
- conditionBuilder.append((addAnd ? "AND " : "")
- + "LOWER(nickname) LIKE '" + searchNickname.toLowerCase()
- + "%' ");
- addAnd = true;
- }
- if (searchFingerprint.length() > 0) {
- conditionBuilder.append((addAnd ? "AND " : "")
- + "fingerprint LIKE '" + searchFingerprint.toLowerCase()
- + "%' ");
- addAnd = true;
- }
- if (searchIPAddress.length() > 0) {
- conditionBuilder.append((addAnd ? "AND " : "")
- + "address LIKE '" + searchIPAddress + "%' ");
- addAnd = true;
- }
- List<String> timeIntervals = new ArrayList<String>();
- if (searchDayTimestamps.size() > 0 ||
- searchMonthTimestamps.size() > 0) {
- StringBuilder timeIntervalBuilder = new StringBuilder();
- boolean addOr = false;
- timeIntervalBuilder.append("AND (");
- for (long searchTimestamp : searchDayTimestamps) {
- if (searchTimestamp < this.minValidAfterMillis) {
- request.setAttribute("outsideInterval", "Returned search "
- + "results may be incomplete, as our data only dates back "
- + "to " + dateTimeFormat.format(this.minValidAfterMillis)
- + ". Older archives are not available.");
- }
- timeIntervalBuilder.append((addOr ? "OR " : "")
- + "(validafter >= '"
- + dateTimeFormat.format(searchTimestamp) + "' AND "
- + "validafter < '" + dateTimeFormat.format(searchTimestamp
- + 24L * 60L * 60L * 1000L) + "') ");
- addOr = true;
- }
- for (long searchTimestamp : searchMonthTimestamps) {
- if (searchTimestamp < this.minValidAfterMillis) {
- request.setAttribute("outsideInterval", "Returned search "
- + "results may be incomplete, as our data only dates back "
- + "to " + dateTimeFormat.format(this.minValidAfterMillis)
- + ". Older archives are not available.");
- }
- Calendar firstOfNextMonth = Calendar.getInstance(
- TimeZone.getTimeZone("UTC"));
- firstOfNextMonth.setTimeInMillis(searchTimestamp);
- firstOfNextMonth.add(Calendar.MONTH, 1);
- timeIntervalBuilder.append((addOr ? "OR " : "")
- + "(validafter >= '"
- + dateTimeFormat.format(searchTimestamp) + "' AND "
- + "validafter < '" + dateTimeFormat.format(
- firstOfNextMonth.getTimeInMillis()) + "') ");
- addOr = true;
- }
- timeIntervalBuilder.append(") ");
- timeIntervals.add(timeIntervalBuilder.toString());
- } else {
- timeIntervals.add("AND validafter >= '"
- + dateTimeFormat.format(System.currentTimeMillis()
- - 4L * 24L * 60L * 60L * 1000L) + "' ");
- timeIntervals.add("AND validafter >= '"
- + dateTimeFormat.format(System.currentTimeMillis()
- - 30L * 24L * 60L * 60L * 1000L) + "' ");
- }
- List<String> queries = new ArrayList<String>();
- for (String timeInterval : timeIntervals) {
- StringBuilder queryBuilder = new StringBuilder();
- queryBuilder.append("SELECT validafter, fingerprint, descriptor, "
- + "rawdesc FROM statusentry WHERE validafter IN (SELECT "
- + "validafter FROM statusentry WHERE ");
- queryBuilder.append(conditionBuilder.toString());
- queryBuilder.append(timeInterval);
- queryBuilder.append("ORDER BY validafter DESC LIMIT 31) AND ");
- queryBuilder.append(conditionBuilder.toString());
- queryBuilder.append(timeInterval);
- queries.add(queryBuilder.toString());
- }
-
- /* Actually execute the query. */
- long startedQuery = System.currentTimeMillis();
- SortedMap<String, SortedSet<String>> foundDescriptors =
- new TreeMap<String, SortedSet<String>>(
- Collections.reverseOrder());
- Map<String, String> rawValidAfterLines =
- new HashMap<String, String>();
- Map<String, String> rawStatusEntries = new HashMap<String, String>();
- String query = null;
- int matches = 0;
- try {
- long requestedConnection = System.currentTimeMillis();
- Connection conn = this.ds.getConnection();
- while (!queries.isEmpty()) {
- query = queries.remove(0);
- this.logger.info("Running query '" + query + "'.");
- Statement statement = conn.createStatement();
- ResultSet rs = statement.executeQuery(query);
- while (rs.next()) {
- matches++;
- String validAfter = rs.getTimestamp(1).toString().
- substring(0, 19);
- String fingerprint = rs.getString(2);
- if (!foundDescriptors.containsKey(validAfter)) {
- foundDescriptors.put(validAfter, new TreeSet<String>());
- }
- foundDescriptors.get(validAfter).add(validAfter + " "
- + fingerprint);
- if (!rawValidAfterLines.containsKey(validAfter)) {
- rawValidAfterLines.put(validAfter, "<tt>valid-after "
- + "<a href=\"https://exonerator.torproject.org/"
- + "consensus?valid-after="
- + validAfter.replaceAll(":", "-").replaceAll(" ", "-")
- + "\" target=\"_blank\">" + validAfter + "</a></tt><br>");
- }
- byte[] rawStatusEntry = rs.getBytes(4);
- String statusEntryLines = null;
- try {
- statusEntryLines = new String(rawStatusEntry, "US-ASCII");
- } catch (UnsupportedEncodingException e) {
- /* This shouldn't happen, because we know that ASCII is
- * supported. */
- }
- StringBuilder rawStatusEntryBuilder = new StringBuilder();
- String[] lines = statusEntryLines.split("\n");
- for (String line : lines) {
- if (line.startsWith("r ")) {
- String[] parts = line.split(" ");
- String descriptorBase64 = String.format("%040x",
- new BigInteger(1, Base64.decodeBase64(parts[3]
- + "==")));
- rawStatusEntryBuilder.append("<tt>r " + parts[1] + " "
- + parts[2] + " <a href=\""
- + "https://exonerator.torproject.org/"
- + "serverdesc?desc-id="
- + descriptorBase64 + "\" target=\"_blank\">" + parts[3]
- + "</a> " + parts[4] + " " + parts[5] + " " + parts[6]
- + " " + parts[7] + " " + parts[8] + "</tt><br>");
- } else {
- rawStatusEntryBuilder.append("<tt>" + line + "</tt><br>");
- }
- }
- rawStatusEntries.put(validAfter + " " + fingerprint,
- rawStatusEntryBuilder.toString());
- }
- rs.close();
- statement.close();
- if (matches >= 31) {
- queries.clear();
- }
- }
- conn.close();
- this.logger.info("Returned a database connection to the pool "
- + "after " + (System.currentTimeMillis()
- - requestedConnection) + " millis.");
- } catch (SQLException e) {
-
- /* Tell the user we have a database problem. */
- response.sendError(HttpServletResponse.SC_INTERNAL_SERVER_ERROR,
- "Database problem");
- return;
- }
- request.setAttribute("query", query);
- request.setAttribute("queryTime", System.currentTimeMillis()
- - startedQuery);
- request.setAttribute("foundDescriptors", foundDescriptors);
- request.setAttribute("rawValidAfterLines", rawValidAfterLines);
- request.setAttribute("rawStatusEntries", rawStatusEntries);
- request.setAttribute("matches", matches);
-
- /* We're done. Let the JSP do the rest. */
- request.getRequestDispatcher("WEB-INF/relay-search.jsp").forward(
- request, response);
- }
-}
-
diff --git a/website/src/org/torproject/ernie/web/GraphsServlet.java b/website/src/org/torproject/ernie/web/GraphsServlet.java
deleted file mode 100644
index 111aa0f..0000000
--- a/website/src/org/torproject/ernie/web/GraphsServlet.java
+++ /dev/null
@@ -1,24 +0,0 @@
-/* Copyright 2011, 2012 The Tor Project
- * See LICENSE for licensing information */
-package org.torproject.ernie.web;
-
-import java.io.IOException;
-
-import javax.servlet.ServletException;
-import javax.servlet.http.HttpServlet;
-import javax.servlet.http.HttpServletRequest;
-import javax.servlet.http.HttpServletResponse;
-
-public class GraphsServlet extends HttpServlet {
-
- private static final long serialVersionUID = 7615715032362498151L;
-
- public void doGet(HttpServletRequest request,
- HttpServletResponse response) throws IOException, ServletException {
-
- /* Forward the request to the JSP that does all the hard work. */
- request.getRequestDispatcher("WEB-INF/graphs.jsp").forward(request,
- response);
- }
-}
-
diff --git a/website/src/org/torproject/ernie/web/IndexServlet.java b/website/src/org/torproject/ernie/web/IndexServlet.java
deleted file mode 100644
index 11aff7c..0000000
--- a/website/src/org/torproject/ernie/web/IndexServlet.java
+++ /dev/null
@@ -1,24 +0,0 @@
-/* Copyright 2011, 2012 The Tor Project
- * See LICENSE for licensing information */
-package org.torproject.ernie.web;
-
-import java.io.IOException;
-
-import javax.servlet.ServletException;
-import javax.servlet.http.HttpServlet;
-import javax.servlet.http.HttpServletRequest;
-import javax.servlet.http.HttpServletResponse;
-
-public class IndexServlet extends HttpServlet {
-
- private static final long serialVersionUID = 7871368999788994664L;
-
- public void doGet(HttpServletRequest request,
- HttpServletResponse response) throws IOException, ServletException {
-
- /* Forward the request to the JSP that does all the hard work. */
- request.getRequestDispatcher("WEB-INF/index.jsp").forward(request,
- response);
- }
-}
-
diff --git a/website/src/org/torproject/ernie/web/ResearchServlet.java b/website/src/org/torproject/ernie/web/ResearchServlet.java
deleted file mode 100644
index c78be69..0000000
--- a/website/src/org/torproject/ernie/web/ResearchServlet.java
+++ /dev/null
@@ -1,24 +0,0 @@
-/* Copyright 2011, 2012 The Tor Project
- * See LICENSE for licensing information */
-package org.torproject.ernie.web;
-
-import java.io.IOException;
-
-import javax.servlet.ServletException;
-import javax.servlet.http.HttpServlet;
-import javax.servlet.http.HttpServletRequest;
-import javax.servlet.http.HttpServletResponse;
-
-public class ResearchServlet extends HttpServlet {
-
- private static final long serialVersionUID = -9151727188925700665L;
-
- public void doGet(HttpServletRequest request,
- HttpServletResponse response) throws IOException, ServletException {
-
- /* Forward the request to the JSP that does all the hard work. */
- request.getRequestDispatcher("WEB-INF/research.jsp").forward(request,
- response);
- }
-}
-
diff --git a/website/src/org/torproject/ernie/web/StatusServlet.java b/website/src/org/torproject/ernie/web/StatusServlet.java
deleted file mode 100644
index 07790ec..0000000
--- a/website/src/org/torproject/ernie/web/StatusServlet.java
+++ /dev/null
@@ -1,24 +0,0 @@
-/* Copyright 2011, 2012 The Tor Project
- * See LICENSE for licensing information */
-package org.torproject.ernie.web;
-
-import java.io.IOException;
-
-import javax.servlet.ServletException;
-import javax.servlet.http.HttpServlet;
-import javax.servlet.http.HttpServletRequest;
-import javax.servlet.http.HttpServletResponse;
-
-public class StatusServlet extends HttpServlet {
-
- private static final long serialVersionUID = -7249872082399236981L;
-
- public void doGet(HttpServletRequest request,
- HttpServletResponse response) throws IOException, ServletException {
-
- /* Forward the request to the JSP that does all the hard work. */
- request.getRequestDispatcher("WEB-INF/status.jsp").forward(request,
- response);
- }
-}
-
diff --git a/website/src/org/torproject/ernie/web/graphs/BubblesServlet.java b/website/src/org/torproject/ernie/web/graphs/BubblesServlet.java
deleted file mode 100644
index 6f66413..0000000
--- a/website/src/org/torproject/ernie/web/graphs/BubblesServlet.java
+++ /dev/null
@@ -1,24 +0,0 @@
-/* Copyright 2013 The Tor Project
- * See LICENSE for licensing information */
-package org.torproject.ernie.web.graphs;
-
-import java.io.IOException;
-
-import javax.servlet.ServletException;
-import javax.servlet.http.HttpServlet;
-import javax.servlet.http.HttpServletRequest;
-import javax.servlet.http.HttpServletResponse;
-
-public class BubblesServlet extends HttpServlet {
-
- private static final long serialVersionUID = -6011833075497881033L;
-
- public void doGet(HttpServletRequest request,
- HttpServletResponse response) throws IOException, ServletException {
-
- /* Forward the request to the JSP that does all the hard work. */
- request.getRequestDispatcher("WEB-INF/bubbles.jsp").forward(request,
- response);
- }
-}
-
diff --git a/website/src/org/torproject/ernie/web/graphs/Countries.java b/website/src/org/torproject/ernie/web/graphs/Countries.java
deleted file mode 100644
index 93dac4c..0000000
--- a/website/src/org/torproject/ernie/web/graphs/Countries.java
+++ /dev/null
@@ -1,285 +0,0 @@
-/* Copyright 2011, 2012 The Tor Project
- * See LICENSE for licensing information */
-package org.torproject.ernie.web.graphs;
-
-import java.util.ArrayList;
-import java.util.List;
-
-public class Countries {
-
- private static Countries instance = new Countries();
-
- public static Countries getInstance() {
- return Countries.instance;
- }
-
- /* List of arrays of length 2, containing country codes at [0] and
- * country names at [1], alphabetically ordered by country names. */
- private List<String[]> knownCountries;
-
- private Countries() {
- this.knownCountries = new ArrayList<String[]>();
- this.knownCountries.add("af;Afghanistan".split(";"));
- this.knownCountries.add("ax;Aland Islands".split(";"));
- this.knownCountries.add("al;Albania".split(";"));
- this.knownCountries.add("dz;Algeria".split(";"));
- this.knownCountries.add("as;American Samoa".split(";"));
- this.knownCountries.add("ad;Andorra".split(";"));
- this.knownCountries.add("ao;Angola".split(";"));
- this.knownCountries.add("ai;Anguilla".split(";"));
- this.knownCountries.add("aq;Antarctica".split(";"));
- this.knownCountries.add("ag;Antigua and Barbuda".split(";"));
- this.knownCountries.add("ar;Argentina".split(";"));
- this.knownCountries.add("am;Armenia".split(";"));
- this.knownCountries.add("aw;Aruba".split(";"));
- this.knownCountries.add("au;Australia".split(";"));
- this.knownCountries.add("at;Austria".split(";"));
- this.knownCountries.add("az;Azerbaijan".split(";"));
- this.knownCountries.add("bs;Bahamas".split(";"));
- this.knownCountries.add("bh;Bahrain".split(";"));
- this.knownCountries.add("bd;Bangladesh".split(";"));
- this.knownCountries.add("bb;Barbados".split(";"));
- this.knownCountries.add("by;Belarus".split(";"));
- this.knownCountries.add("be;Belgium".split(";"));
- this.knownCountries.add("bz;Belize".split(";"));
- this.knownCountries.add("bj;Benin".split(";"));
- this.knownCountries.add("bm;Bermuda".split(";"));
- this.knownCountries.add("bt;Bhutan".split(";"));
- this.knownCountries.add("bo;Bolivia".split(";"));
- this.knownCountries.add("ba;Bosnia and Herzegovina".split(";"));
- this.knownCountries.add("bw;Botswana".split(";"));
- this.knownCountries.add("bv;Bouvet Island".split(";"));
- this.knownCountries.add("br;Brazil".split(";"));
- this.knownCountries.add("io;British Indian Ocean Territory".
- split(";"));
- this.knownCountries.add("bn;Brunei".split(";"));
- this.knownCountries.add("bg;Bulgaria".split(";"));
- this.knownCountries.add("bf;Burkina Faso".split(";"));
- this.knownCountries.add("mm;Burma".split(";"));
- this.knownCountries.add("bi;Burundi".split(";"));
- this.knownCountries.add("kh;Cambodia".split(";"));
- this.knownCountries.add("cm;Cameroon".split(";"));
- this.knownCountries.add("ca;Canada".split(";"));
- this.knownCountries.add("cv;Cape Verde".split(";"));
- this.knownCountries.add("ky;Cayman Islands".split(";"));
- this.knownCountries.add("cf;Central African Republic".split(";"));
- this.knownCountries.add("td;Chad".split(";"));
- this.knownCountries.add("cl;Chile".split(";"));
- this.knownCountries.add("cn;China".split(";"));
- this.knownCountries.add("cx;Christmas Island".split(";"));
- this.knownCountries.add("cc;Cocos (Keeling) Islands".split(";"));
- this.knownCountries.add("co;Colombia".split(";"));
- this.knownCountries.add("km;Comoros".split(";"));
- this.knownCountries.add("cd;Congo, The Democratic Republic of the".
- split(";"));
- this.knownCountries.add("cg;Congo".split(";"));
- this.knownCountries.add("ck;Cook Islands".split(";"));
- this.knownCountries.add("cr;Costa Rica".split(";"));
- this.knownCountries.add("ci:Côte d'Ivoire".split(":"));
- this.knownCountries.add("hr;Croatia".split(";"));
- this.knownCountries.add("cu;Cuba".split(";"));
- this.knownCountries.add("cy;Cyprus".split(";"));
- this.knownCountries.add("cz;Czech Republic".split(";"));
- this.knownCountries.add("dk;Denmark".split(";"));
- this.knownCountries.add("dj;Djibouti".split(";"));
- this.knownCountries.add("dm;Dominica".split(";"));
- this.knownCountries.add("do;Dominican Republic".split(";"));
- this.knownCountries.add("ec;Ecuador".split(";"));
- this.knownCountries.add("eg;Egypt".split(";"));
- this.knownCountries.add("sv;El Salvador".split(";"));
- this.knownCountries.add("gq;Equatorial Guinea".split(";"));
- this.knownCountries.add("er;Eritrea".split(";"));
- this.knownCountries.add("ee;Estonia".split(";"));
- this.knownCountries.add("et;Ethiopia".split(";"));
- this.knownCountries.add("fk;Falkland Islands (Malvinas)".split(";"));
- this.knownCountries.add("fo;Faroe Islands".split(";"));
- this.knownCountries.add("fj;Fiji".split(";"));
- this.knownCountries.add("fi;Finland".split(";"));
- this.knownCountries.add("fx;France, Metropolitan".split(";"));
- this.knownCountries.add("fr;France".split(";"));
- this.knownCountries.add("gf;French Guiana".split(";"));
- this.knownCountries.add("pf;French Polynesia".split(";"));
- this.knownCountries.add("tf;French Southern Territories".split(";"));
- this.knownCountries.add("ga;Gabon".split(";"));
- this.knownCountries.add("gm;Gambia".split(";"));
- this.knownCountries.add("ge;Georgia".split(";"));
- this.knownCountries.add("de;Germany".split(";"));
- this.knownCountries.add("gh;Ghana".split(";"));
- this.knownCountries.add("gi;Gibraltar".split(";"));
- this.knownCountries.add("gr;Greece".split(";"));
- this.knownCountries.add("gl;Greenland".split(";"));
- this.knownCountries.add("gd;Grenada".split(";"));
- this.knownCountries.add("gp;Guadeloupe".split(";"));
- this.knownCountries.add("gu;Guam".split(";"));
- this.knownCountries.add("gt;Guatemala".split(";"));
- this.knownCountries.add("gg;Guernsey".split(";"));
- this.knownCountries.add("gn;Guinea".split(";"));
- this.knownCountries.add("gw;Guinea-Bissau".split(";"));
- this.knownCountries.add("gy;Guyana".split(";"));
- this.knownCountries.add("ht;Haiti".split(";"));
- this.knownCountries.add("hm;Heard Island and McDonald Islands".
- split(";"));
- this.knownCountries.add("va;Vatican City".split(";"));
- this.knownCountries.add("hn;Honduras".split(";"));
- this.knownCountries.add("hk;Hong Kong".split(";"));
- this.knownCountries.add("hu;Hungary".split(";"));
- this.knownCountries.add("is;Iceland".split(";"));
- this.knownCountries.add("in;India".split(";"));
- this.knownCountries.add("id;Indonesia".split(";"));
- this.knownCountries.add("ir;Iran".split(";"));
- this.knownCountries.add("iq;Iraq".split(";"));
- this.knownCountries.add("ie;Ireland".split(";"));
- this.knownCountries.add("im;Isle of Man".split(";"));
- this.knownCountries.add("il;Israel".split(";"));
- this.knownCountries.add("it;Italy".split(";"));
- this.knownCountries.add("jm;Jamaica".split(";"));
- this.knownCountries.add("jp;Japan".split(";"));
- this.knownCountries.add("je;Jersey".split(";"));
- this.knownCountries.add("jo;Jordan".split(";"));
- this.knownCountries.add("kz;Kazakhstan".split(";"));
- this.knownCountries.add("ke;Kenya".split(";"));
- this.knownCountries.add("ki;Kiribati".split(";"));
- this.knownCountries.add("kp;North Korea".split(";"));
- this.knownCountries.add("kr;Korea, Republic of".split(";"));
- this.knownCountries.add("kw;Kuwait".split(";"));
- this.knownCountries.add("kg;Kyrgyzstan".split(";"));
- this.knownCountries.add("la;Laos".split(";"));
- this.knownCountries.add("lv;Latvia".split(";"));
- this.knownCountries.add("lb;Lebanon".split(";"));
- this.knownCountries.add("ls;Lesotho".split(";"));
- this.knownCountries.add("lr;Liberia".split(";"));
- this.knownCountries.add("ly;Libya".split(";"));
- this.knownCountries.add("li;Liechtenstein".split(";"));
- this.knownCountries.add("lt;Lithuania".split(";"));
- this.knownCountries.add("lu;Luxembourg".split(";"));
- this.knownCountries.add("mo;Macau".split(";"));
- this.knownCountries.add("mk;Macedonia".split(";"));
- this.knownCountries.add("mg;Madagascar".split(";"));
- this.knownCountries.add("mw;Malawi".split(";"));
- this.knownCountries.add("my;Malaysia".split(";"));
- this.knownCountries.add("mv;Maldives".split(";"));
- this.knownCountries.add("ml;Mali".split(";"));
- this.knownCountries.add("mt;Malta".split(";"));
- this.knownCountries.add("mh;Marshall Islands".split(";"));
- this.knownCountries.add("mq;Martinique".split(";"));
- this.knownCountries.add("mr;Mauritania".split(";"));
- this.knownCountries.add("mu;Mauritius".split(";"));
- this.knownCountries.add("yt;Mayotte".split(";"));
- this.knownCountries.add("mx;Mexico".split(";"));
- this.knownCountries.add("fm;Micronesia, Federated States of".
- split(";"));
- this.knownCountries.add("md;Moldova, Republic of".split(";"));
- this.knownCountries.add("mc;Monaco".split(";"));
- this.knownCountries.add("mn;Mongolia".split(";"));
- this.knownCountries.add("me;Montenegro".split(";"));
- this.knownCountries.add("ms;Montserrat".split(";"));
- this.knownCountries.add("ma;Morocco".split(";"));
- this.knownCountries.add("mz;Mozambique".split(";"));
- this.knownCountries.add("mm;Burma".split(";"));
- this.knownCountries.add("na;Namibia".split(";"));
- this.knownCountries.add("nr;Nauru".split(";"));
- this.knownCountries.add("np;Nepal".split(";"));
- this.knownCountries.add("an;Netherlands Antilles".split(";"));
- this.knownCountries.add("nl;Netherlands".split(";"));
- this.knownCountries.add("nc;New Caledonia".split(";"));
- this.knownCountries.add("nz;New Zealand".split(";"));
- this.knownCountries.add("ni;Nicaragua".split(";"));
- this.knownCountries.add("ne;Niger".split(";"));
- this.knownCountries.add("ng;Nigeria".split(";"));
- this.knownCountries.add("nu;Niue".split(";"));
- this.knownCountries.add("nf;Norfolk Island".split(";"));
- this.knownCountries.add("mp;Northern Mariana Islands".split(";"));
- this.knownCountries.add("no;Norway".split(";"));
- this.knownCountries.add("om;Oman".split(";"));
- this.knownCountries.add("pk;Pakistan".split(";"));
- this.knownCountries.add("pw;Palau".split(";"));
- this.knownCountries.add("ps;Palestinian Territory".split(";"));
- this.knownCountries.add("pa;Panama".split(";"));
- this.knownCountries.add("pg;Papua New Guinea".split(";"));
- this.knownCountries.add("py;Paraguay".split(";"));
- this.knownCountries.add("pe;Peru".split(";"));
- this.knownCountries.add("ph;Philippines".split(";"));
- this.knownCountries.add("pn;Pitcairn Islands".split(";"));
- this.knownCountries.add("pl;Poland".split(";"));
- this.knownCountries.add("pt;Portugal".split(";"));
- this.knownCountries.add("pr;Puerto Rico".split(";"));
- this.knownCountries.add("qa;Qatar".split(";"));
- this.knownCountries.add("re;Reunion".split(";"));
- this.knownCountries.add("ro;Romania".split(";"));
- this.knownCountries.add("ru;Russia".split(";"));
- this.knownCountries.add("rw;Rwanda".split(";"));
- this.knownCountries.add("bl;Saint Bartelemey".split(";"));
- this.knownCountries.add("sh;Saint Helena".split(";"));
- this.knownCountries.add("kn;Saint Kitts and Nevis".split(";"));
- this.knownCountries.add("lc;Saint Lucia".split(";"));
- this.knownCountries.add("mf;Saint Martin".split(";"));
- this.knownCountries.add("pm;Saint Pierre and Miquelon".split(";"));
- this.knownCountries.add("vc;Saint Vincent and the Grenadines".
- split(";"));
- this.knownCountries.add("ws;Samoa".split(";"));
- this.knownCountries.add("sm;San Marino".split(";"));
- this.knownCountries.add("st:São Tomé and Príncipe".
- split(":"));
- this.knownCountries.add("sa;Saudi Arabia".split(";"));
- this.knownCountries.add("sn;Senegal".split(";"));
- this.knownCountries.add("rs;Serbia".split(";"));
- this.knownCountries.add("sc;Seychelles".split(";"));
- this.knownCountries.add("sl;Sierra Leone".split(";"));
- this.knownCountries.add("sg;Singapore".split(";"));
- this.knownCountries.add("sk;Slovakia".split(";"));
- this.knownCountries.add("si;Slovenia".split(";"));
- this.knownCountries.add("sb;Solomon Islands".split(";"));
- this.knownCountries.add("so;Somalia".split(";"));
- this.knownCountries.add("za;South Africa".split(";"));
- this.knownCountries.add(("gs;South Georgia and the South Sandwich "
- + "Islands").split(";"));
- this.knownCountries.add("ss;South Sudan".split(";"));
- this.knownCountries.add("es;Spain".split(";"));
- this.knownCountries.add("lk;Sri Lanka".split(";"));
- this.knownCountries.add("sd;Sudan".split(";"));
- this.knownCountries.add("sr;Suriname".split(";"));
- this.knownCountries.add("sj;Svalbard and Jan Mayen".split(";"));
- this.knownCountries.add("sz;Swaziland".split(";"));
- this.knownCountries.add("se;Sweden".split(";"));
- this.knownCountries.add("ch;Switzerland".split(";"));
- this.knownCountries.add("sy;Syrian Arab Republic".split(";"));
- this.knownCountries.add("tw;Taiwan".split(";"));
- this.knownCountries.add("tj;Tajikistan".split(";"));
- this.knownCountries.add("tz;Tanzania, United Republic of".split(";"));
- this.knownCountries.add("th;Thailand".split(";"));
- this.knownCountries.add("tl;East Timor".split(";"));
- this.knownCountries.add("tg;Togo".split(";"));
- this.knownCountries.add("tk;Tokelau".split(";"));
- this.knownCountries.add("to;Tonga".split(";"));
- this.knownCountries.add("tt;Trinidad and Tobago".split(";"));
- this.knownCountries.add("tn;Tunisia".split(";"));
- this.knownCountries.add("tr;Turkey".split(";"));
- this.knownCountries.add("tm;Turkmenistan".split(";"));
- this.knownCountries.add("tc;Turks and Caicos Islands".split(";"));
- this.knownCountries.add("tv;Tuvalu".split(";"));
- this.knownCountries.add("ug;Uganda".split(";"));
- this.knownCountries.add("ua;Ukraine".split(";"));
- this.knownCountries.add("ae;United Arab Emirates".split(";"));
- this.knownCountries.add("gb;United Kingdom".split(";"));
- this.knownCountries.add("um;United States Minor Outlying Islands".
- split(";"));
- this.knownCountries.add("us;United States".split(";"));
- this.knownCountries.add("uy;Uruguay".split(";"));
- this.knownCountries.add("uz;Uzbekistan".split(";"));
- this.knownCountries.add("vu;Vanuatu".split(";"));
- this.knownCountries.add("ve;Venezuela".split(";"));
- this.knownCountries.add("vn;Vietnam".split(";"));
- this.knownCountries.add("vg;Virgin Islands, British".split(";"));
- this.knownCountries.add("vi;Virgin Islands, U.S.".split(";"));
- this.knownCountries.add("wf;Wallis and Futuna".split(";"));
- this.knownCountries.add("eh;Western Sahara".split(";"));
- this.knownCountries.add("ye;Yemen".split(";"));
- this.knownCountries.add("zm;Zambia".split(";"));
- this.knownCountries.add("zw;Zimbabwe".split(";"));
- }
-
- public List<String[]> getCountryList() {
- return this.knownCountries;
- }
-}
-
diff --git a/website/src/org/torproject/ernie/web/graphs/CsvServlet.java b/website/src/org/torproject/ernie/web/graphs/CsvServlet.java
deleted file mode 100644
index 40e3bea..0000000
--- a/website/src/org/torproject/ernie/web/graphs/CsvServlet.java
+++ /dev/null
@@ -1,97 +0,0 @@
-/* Copyright 2011, 2012 The Tor Project
- * See LICENSE for licensing information */
-package org.torproject.ernie.web.graphs;
-
-import java.io.IOException;
-import java.util.SortedSet;
-import java.util.logging.Logger;
-
-import javax.servlet.ServletException;
-import javax.servlet.http.HttpServlet;
-import javax.servlet.http.HttpServletRequest;
-import javax.servlet.http.HttpServletResponse;
-
-/**
- * Servlet that reads an HTTP request for a comma-separated value file,
- * asks the GraphGenerator to generate this file, and returns it to the
- * client.
- */
-public class CsvServlet extends HttpServlet {
-
- private static final long serialVersionUID = 7501442926823719958L;
-
- private RObjectGenerator rObjectGenerator;
-
- /* Available CSV files. */
- private SortedSet<String> availableCsvFiles;
-
- private Logger logger;
-
- public void init() {
-
- /* Initialize logger. */
- this.logger = Logger.getLogger(CsvServlet.class.toString());
-
- /* Get a reference to the R object generator that we need to generate
- * CSV files. */
- this.rObjectGenerator = (RObjectGenerator) getServletContext().
- getAttribute("RObjectGenerator");
- this.availableCsvFiles = rObjectGenerator.getAvailableCsvFiles();
- }
-
- public void doGet(HttpServletRequest request,
- HttpServletResponse response) throws IOException,
- ServletException {
-
- /* Check if the directory listing was requested. */
- String requestURI = request.getRequestURI();
- if (requestURI.equals("/ernie/csv/")) {
- request.setAttribute("directory", "/csv");
- request.setAttribute("extension", ".csv");
- request.setAttribute("files", this.availableCsvFiles);
- request.getRequestDispatcher("/WEB-INF/dir.jsp").forward(request,
- response);
- return;
- }
-
- /* Find out which CSV file was requested and make sure we know this
- * CSV file type. */
- String requestedCsvFile = requestURI;
- if (requestedCsvFile.endsWith(".csv")) {
- requestedCsvFile = requestedCsvFile.substring(0,
- requestedCsvFile.length() - ".csv".length());
- }
- if (requestedCsvFile.contains("/")) {
- requestedCsvFile = requestedCsvFile.substring(requestedCsvFile.
- lastIndexOf("/") + 1);
- }
- if (!availableCsvFiles.contains(requestedCsvFile)) {
- logger.info("Did not recognize requested .csv file from request "
- + "URI: '" + requestURI + "'. Responding with 404 Not Found.");
- response.sendError(HttpServletResponse.SC_NOT_FOUND);
- return;
- }
- logger.fine("CSV file '" + requestedCsvFile + ".csv' requested.");
-
- /* Request CSV file from R object generator, which asks Rserve to
- * generate it. */
- RObject csvFile = this.rObjectGenerator.generateCsv(
- requestedCsvFile, true);
-
- /* Make sure that we have a .csv file to return. */
- if (csvFile == null) {
- response.sendError(HttpServletResponse.SC_INTERNAL_SERVER_ERROR);
- return;
- }
-
- /* Write CSV file to response. */
- String csvFileContent = new String(csvFile.getBytes());
- response.setContentType("text/csv");
- response.setHeader("Content-Length", String.valueOf(
- csvFileContent.length()));
- response.setHeader("Content-Disposition",
- "inline; filename=\"" + requestedCsvFile + ".csv\"");
- response.getWriter().print(csvFileContent);
- }
-}
-
diff --git a/website/src/org/torproject/ernie/web/graphs/GraphImageServlet.java b/website/src/org/torproject/ernie/web/graphs/GraphImageServlet.java
deleted file mode 100644
index b7d0b17..0000000
--- a/website/src/org/torproject/ernie/web/graphs/GraphImageServlet.java
+++ /dev/null
@@ -1,76 +0,0 @@
-/* Copyright 2011, 2012 The Tor Project
- * See LICENSE for licensing information */
-package org.torproject.ernie.web.graphs;
-
-import java.io.BufferedOutputStream;
-import java.io.IOException;
-
-import javax.servlet.ServletException;
-import javax.servlet.http.HttpServlet;
-import javax.servlet.http.HttpServletRequest;
-import javax.servlet.http.HttpServletResponse;
-
-/**
- * Servlet that reads an HTTP request for a graph image, asks the
- * RObjectGenerator to generate this graph if it's not in the cache, and
- * returns the image bytes to the client.
- */
-public class GraphImageServlet extends HttpServlet {
-
- private static final long serialVersionUID = -7356818641689744288L;
-
- private RObjectGenerator rObjectGenerator;
-
- public void init() {
-
- /* Get a reference to the R object generator that we need to generate
- * graph images. */
- this.rObjectGenerator = (RObjectGenerator) getServletContext().
- getAttribute("RObjectGenerator");
- }
-
- public void doGet(HttpServletRequest request,
- HttpServletResponse response) throws IOException,
- ServletException {
-
- /* Find out which graph type was requested and make sure we know this
- * graph type and file type. */
- String requestedGraph = request.getRequestURI();
- String fileType = null;
- if (requestedGraph.endsWith(".png") ||
- requestedGraph.endsWith(".pdf") ||
- requestedGraph.endsWith(".svg")) {
- fileType = requestedGraph.substring(requestedGraph.length() - 3);
- requestedGraph = requestedGraph.substring(0, requestedGraph.length()
- - 4);
- }
- if (requestedGraph.contains("/")) {
- requestedGraph = requestedGraph.substring(requestedGraph.
- lastIndexOf("/") + 1);
- }
-
- /* Request graph from R object generator, which either returns it from
- * its cache or asks Rserve to generate it. */
- RObject graph = rObjectGenerator.generateGraph(requestedGraph,
- fileType, request.getParameterMap(), true);
-
- /* Make sure that we have a graph to return. */
- if (graph == null || graph.getBytes() == null || fileType == null) {
- response.sendError(HttpServletResponse.SC_INTERNAL_SERVER_ERROR);
- return;
- }
-
- /* Write graph bytes to response. */
- BufferedOutputStream output = null;
- response.setContentType("image/" + fileType);
- response.setHeader("Content-Length",
- String.valueOf(graph.getBytes().length));
- response.setHeader("Content-Disposition",
- "inline; filename=\"" + graph.getFileName() + "\"");
- output = new BufferedOutputStream(response.getOutputStream(), 1024);
- output.write(graph.getBytes(), 0, graph.getBytes().length);
- output.flush();
- output.close();
- }
-}
-
diff --git a/website/src/org/torproject/ernie/web/graphs/GraphParameterChecker.java b/website/src/org/torproject/ernie/web/graphs/GraphParameterChecker.java
deleted file mode 100644
index 74ca6f9..0000000
--- a/website/src/org/torproject/ernie/web/graphs/GraphParameterChecker.java
+++ /dev/null
@@ -1,280 +0,0 @@
-/* Copyright 2011, 2012 The Tor Project
- * See LICENSE for licensing information */
-package org.torproject.ernie.web.graphs;
-
-import java.text.ParseException;
-import java.text.SimpleDateFormat;
-import java.util.Arrays;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-import java.util.TimeZone;
-import java.util.regex.Pattern;
-
-/**
- * Checks request parameters passed to graph-generating servlets.
- */
-public class GraphParameterChecker {
-
- /**
- * Singleton instance of this class.
- */
- private static GraphParameterChecker instance =
- new GraphParameterChecker();
-
- /**
- * Returns the singleton instance of this class.
- */
- public static GraphParameterChecker getInstance() {
- return instance;
- }
-
- /* Date format for parsing start and end dates. */
- private SimpleDateFormat dateFormat;
-
- /* Available graphs with corresponding parameter lists. */
- private Map<String, String> availableGraphs;
-
- /* Known parameters and parameter values. */
- private Map<String, String> knownParameterValues;
-
- /**
- * Initializes map with valid parameters for each of the graphs.
- */
- public GraphParameterChecker() {
- this.dateFormat = new SimpleDateFormat("yyyy-MM-dd");
- this.dateFormat.setTimeZone(TimeZone.getTimeZone("UTC"));
-
- this.knownParameterValues = new HashMap<String, String>();
- this.knownParameterValues.put("flag",
- "Running,Exit,Guard,Fast,Stable,HSDir");
- StringBuilder sb = new StringBuilder("all");
- for (String[] country : Countries.getInstance().getCountryList()) {
- sb.append("," + country[0]);
- }
- this.knownParameterValues.put("country", sb.toString());
- this.knownParameterValues.put("events", "on,off,points");
- this.knownParameterValues.put("source", "all,siv,moria,torperf");
- this.knownParameterValues.put("filesize", "50kb,1mb,5mb");
- this.knownParameterValues.put("transport",
- "obfs2,obfs3,websocket,<OR>,<??>");
- this.knownParameterValues.put("version", "v4,v6");
- }
-
- public void setAvailableGraphs(Map<String, String> availableGraphs) {
- this.availableGraphs = availableGraphs;
- }
-
- /**
- * Checks request parameters for the given graph type and returns a map
- * of recognized parameters, or null if the graph type doesn't exist or
- * the parameters are invalid.
- */
- public Map<String, String[]> checkParameters(String graphType,
- Map requestParameters) {
-
- /* Check if the graph type exists. */
- if (graphType == null ||
- !this.availableGraphs.containsKey(graphType)) {
- return null;
- }
-
- /* Find out which other parameters are supported by this graph type
- * and parse them if they are given. */
- Set<String> supportedGraphParameters = new HashSet<String>(Arrays.
- asList(this.availableGraphs.get(graphType).split(",")));
- Map<String, String[]> recognizedGraphParameters =
- new HashMap<String, String[]>();
-
- /* Parse start and end dates if supported by the graph type. If no end
- * date is provided, set it to today. If no start date is provided,
- * set it to 90 days before the end date. Make sure that start date
- * precedes end date. */
- if (supportedGraphParameters.contains("start") ||
- supportedGraphParameters.contains("end")) {
- String[] startParameter = (String[]) requestParameters.get("start");
- String[] endParameter = (String[]) requestParameters.get("end");
- long endTimestamp = System.currentTimeMillis();
- if (endParameter != null && endParameter.length > 0 &&
- endParameter[0].length() > 0) {
- try {
- endTimestamp = dateFormat.parse(endParameter[0]).getTime();
- } catch (ParseException e) {
- return null;
- }
- if (!endParameter[0].startsWith("20")) {
- return null;
- }
- }
- endParameter = new String[] { dateFormat.format(endTimestamp) };
- long startTimestamp = endTimestamp - 90L * 24L * 60L * 60L * 1000L;
- if (startParameter != null && startParameter.length > 0 &&
- startParameter[0].length() > 0) {
- try {
- startTimestamp = dateFormat.parse(startParameter[0]).getTime();
- } catch (ParseException e) {
- return null;
- }
- if (!startParameter[0].startsWith("20")) {
- return null;
- }
- }
- startParameter = new String[] { dateFormat.format(startTimestamp) };
- if (startTimestamp > endTimestamp) {
- return null;
- }
- recognizedGraphParameters.put("start", startParameter);
- recognizedGraphParameters.put("end", endParameter);
- }
-
- /* Parse relay flags if supported by the graph type. If no relay flags
- * are passed or none of them have been recognized, use the set of all
- * known flags as default. */
- if (supportedGraphParameters.contains("flag")) {
- String[] flagParameters = (String[]) requestParameters.get("flag");
- List<String> knownFlags = Arrays.asList(
- this.knownParameterValues.get("flag").split(","));
- if (flagParameters != null) {
- for (String flag : flagParameters) {
- if (flag == null || flag.length() == 0 ||
- !knownFlags.contains(flag)) {
- return null;
- }
- }
- } else {
- flagParameters = "Running,Exit,Guard,Fast,Stable".split(",");
- }
- recognizedGraphParameters.put("flag", flagParameters);
- }
-
- /* Parse country codes if supported by the graph type. If no countries
- * are passed, use country code "all" (all countries) as default. */
- if (supportedGraphParameters.contains("country")) {
- String[] countryParameters = (String[]) requestParameters.get(
- "country");
- List<String> knownCountries = Arrays.asList(
- this.knownParameterValues.get("country").split(","));
- if (countryParameters != null) {
- for (String country : countryParameters) {
- if (country == null || country.length() == 0 ||
- !knownCountries.contains(country)) {
- return null;
- }
- }
- } else {
- countryParameters = new String[] { "all" };
- }
- recognizedGraphParameters.put("country", countryParameters);
- }
-
-  /* Parse whether the estimated min/max range shall be displayed if
-   * supported by the graph type. This parameter can be "on", "off", or
-   * "points", where "off" is the default. */
- if (supportedGraphParameters.contains("events")) {
- String[] eventsParameter = (String[]) requestParameters.get(
- "events");
- List<String> knownRanges = Arrays.asList(
- this.knownParameterValues.get("events").split(","));
- if (eventsParameter != null) {
- if (eventsParameter.length != 1 ||
- eventsParameter[0].length() == 0 ||
- !knownRanges.contains(eventsParameter[0])) {
- return null;
- }
- } else {
- eventsParameter = new String[] { "off" };
- }
- recognizedGraphParameters.put("events", eventsParameter);
- }
-
-  /* Parse torperf data source if supported by the graph type. Only a
-   * single source can be passed. If no source is passed, use "all"
-   * as default. */
- if (supportedGraphParameters.contains("source")) {
- String[] sourceParameter = (String[]) requestParameters.get(
- "source");
- List<String> knownSources = Arrays.asList(
- this.knownParameterValues.get("source").split(","));
- if (sourceParameter != null) {
- if (sourceParameter.length != 1) {
- return null;
- }
- if (sourceParameter[0].length() == 0 ||
- !knownSources.contains(sourceParameter[0])) {
- return null;
- }
- } else {
- sourceParameter = new String[] { "all" };
- }
- recognizedGraphParameters.put("source", sourceParameter);
- }
-
- /* Parse torperf file size if supported by the graph type. Only a
- * single file size can be passed. If no file size is passed, use
- * "50kb" as default. */
- if (supportedGraphParameters.contains("filesize")) {
- String[] filesizeParameter = (String[]) requestParameters.get(
- "filesize");
- List<String> knownFilesizes = Arrays.asList(
- this.knownParameterValues.get("filesize").split(","));
- if (filesizeParameter != null) {
- if (filesizeParameter.length != 1) {
- return null;
- }
- if (filesizeParameter[0].length() == 0 ||
- !knownFilesizes.contains(filesizeParameter[0])) {
- return null;
- }
- } else {
- filesizeParameter = new String[] { "50kb" };
- }
- recognizedGraphParameters.put("filesize", filesizeParameter);
- }
-
- /* Parse transports if supported by the graph type. If no transports
- * are passed, use "<OR>" as default. */
- if (supportedGraphParameters.contains("transport")) {
- String[] transportParameters = (String[]) requestParameters.get(
- "transport");
- List<String> knownTransports = Arrays.asList(
- this.knownParameterValues.get("transport").split(","));
- if (transportParameters != null) {
- for (String transport : transportParameters) {
- if (transport == null || transport.length() == 0 ||
- !knownTransports.contains(transport)) {
- return null;
- }
- }
- } else {
- transportParameters = new String[] { "<OR>" };
- }
- recognizedGraphParameters.put("transport", transportParameters);
- }
-
- /* Parse versions if supported by the graph type. If no versions
- * are passed, use "v4" as default. */
- if (supportedGraphParameters.contains("version")) {
- String[] versionParameters = (String[]) requestParameters.get(
- "version");
- List<String> knownVersions = Arrays.asList(
- this.knownParameterValues.get("version").split(","));
- if (versionParameters != null) {
- for (String version : versionParameters) {
- if (version == null || version.length() == 0 ||
- !knownVersions.contains(version)) {
- return null;
- }
- }
- } else {
- versionParameters = new String[] { "v4" };
- }
- recognizedGraphParameters.put("version", versionParameters);
- }
-
- /* We now have a map with all required graph parameters. Return it. */
- return recognizedGraphParameters;
- }
-}
-
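To illustrate how the graph servlets use the checker above: checkParameters()
fills in defaults (for example a 90-day date range ending today) and returns
null as soon as any value is invalid. The following sketch is hypothetical;
the graph name "networksize" and its parameter list are assumptions for
illustration, and in the deployed webapp RObjectGenerator registers the real
set of available graphs:

  import java.util.HashMap;
  import java.util.Map;

  public class GraphParameterCheckerExample {
    public static void main(String[] args) {
      GraphParameterChecker checker = GraphParameterChecker.getInstance();
      Map<String, String> availableGraphs = new HashMap<String, String>();
      availableGraphs.put("networksize", "start,end,filename");
      checker.setAvailableGraphs(availableGraphs);

      Map<String, String[]> requestParameters =
          new HashMap<String, String[]>();
      requestParameters.put("start", new String[] { "2013-10-01" });
      requestParameters.put("end", new String[] { "2013-12-31" });

      /* Returns a map with validated "start" and "end" values, or null
       * if a date is malformed or start comes after end. */
      Map<String, String[]> checked =
          checker.checkParameters("networksize", requestParameters);
      System.out.println(checked != null ? checked.keySet() : null);
    }
  }
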
diff --git a/website/src/org/torproject/ernie/web/graphs/GraphsSubpagesServlet.java b/website/src/org/torproject/ernie/web/graphs/GraphsSubpagesServlet.java
deleted file mode 100644
index 94d7340..0000000
--- a/website/src/org/torproject/ernie/web/graphs/GraphsSubpagesServlet.java
+++ /dev/null
@@ -1,162 +0,0 @@
-/* Copyright 2011, 2012 The Tor Project
- * See LICENSE for licensing information */
-package org.torproject.ernie.web.graphs;
-
-import java.io.IOException;
-import java.text.SimpleDateFormat;
-import java.util.Arrays;
-import java.util.Date;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-import java.util.TimeZone;
-
-import javax.servlet.ServletException;
-import javax.servlet.http.HttpServlet;
-import javax.servlet.http.HttpServletRequest;
-import javax.servlet.http.HttpServletResponse;
-
-public class GraphsSubpagesServlet extends HttpServlet {
-
- private static final long serialVersionUID = -5959829347747628403L;
-
- /* Available graphs subpages with corresponding JSP to which requests
- * are forwarded. */
- private Map<String, String> availableGraphsSubpages;
-
- /* Available tables on graphs subpages. */
- private Map<String, Set<String>> availableGraphsSubpageTables;
-
- /* Country codes and names for per-country graphs. */
- private List<String[]> knownCountries;
-
- /* R object generator for generating table data. */
- private RObjectGenerator rObjectGenerator;
-
- public GraphsSubpagesServlet() {
- this.availableGraphsSubpages = new HashMap<String, String>();
- this.availableGraphsSubpages.put("network.html",
- "WEB-INF/network.jsp");
- this.availableGraphsSubpages.put("fast-exits.html",
- "WEB-INF/fast-exits.jsp");
- this.availableGraphsSubpages.put("users.html", "WEB-INF/users.jsp");
- this.availableGraphsSubpages.put("performance.html",
- "WEB-INF/performance.jsp");
-
- this.availableGraphsSubpageTables =
- new HashMap<String, Set<String>>();
- this.availableGraphsSubpageTables.put("users.html",
- new HashSet<String>(Arrays.asList((
- "direct-users,censorship-events,bridge-users,userstats-relay,"
- + "userstats-censorship-events,userstats-bridge").split(","))));
-
- this.knownCountries = Countries.getInstance().getCountryList();
- }
-
- public void init() {
- /* Get a reference to the R object generator that we need to generate
- * table data. */
- this.rObjectGenerator = (RObjectGenerator) getServletContext().
- getAttribute("RObjectGenerator");
- }
-
- public void doGet(HttpServletRequest request,
- HttpServletResponse response) throws IOException, ServletException {
-
- /* Find out which graph subpage was requested and look up which JSP
- * handles this subpage. */
- String requestedPage = request.getRequestURI();
- if (requestedPage == null) {
- response.sendError(HttpServletResponse.SC_BAD_REQUEST);
- return;
- }
- if (requestedPage.contains("/")) {
- requestedPage = requestedPage.substring(requestedPage.
- lastIndexOf("/") + 1);
- }
- if (!availableGraphsSubpages.containsKey(requestedPage)) {
- response.sendError(HttpServletResponse.SC_INTERNAL_SERVER_ERROR);
- return;
- }
- String jsp = availableGraphsSubpages.get(requestedPage);
-
- /* Find out which graph or table type was requested, if any. */
- String requestedGraph = request.getParameter("graph");
- String requestedTable = request.getParameter("table");
- if (requestedGraph != null) {
-
- /* Check if the passed parameters are valid. */
- Map<String, String[]> checkedParameters = GraphParameterChecker.
- getInstance().checkParameters(requestedGraph,
- request.getParameterMap());
- if (checkedParameters != null) {
-
- /* Set the graph's attributes to the appropriate values, so that
- * we can display the correct graph and prepopulate the form. */
- StringBuilder urlBuilder = new StringBuilder();
- for (Map.Entry<String, String[]> param :
- checkedParameters.entrySet()) {
- request.setAttribute(requestedGraph.replaceAll("-", "_") + "_"
- + param.getKey(), param.getValue());
- for (String paramValue : param.getValue()) {
- urlBuilder.append("&" + param.getKey() + "=" + paramValue);
- }
- }
- String url = "?" + urlBuilder.toString().substring(1);
- request.setAttribute(requestedGraph.replaceAll("-", "_") + "_url",
- url);
- }
- }
- if (requestedTable != null) {
-
- /* Check if the passed parameters are valid. */
- Map<String, String[]> checkedParameters = TableParameterChecker.
- getInstance().checkParameters(requestedTable,
- request.getParameterMap());
- if (checkedParameters != null) {
-
- /* Set the table's attributes to the appropriate values, so that
- * we can prepopulate the form. */
- for (Map.Entry<String, String[]> param :
- checkedParameters.entrySet()) {
- request.setAttribute(requestedTable.replaceAll("-", "_") + "_"
- + param.getKey(), param.getValue());
- }
- }
- }
-
- /* Generate table data if the graphs subpage has any tables,
- * regardless of whether a table update was requested, and add the
- * table data as request attribute. */
- if (this.availableGraphsSubpageTables.containsKey(requestedPage)) {
- for (String tableName :
- this.availableGraphsSubpageTables.get(requestedPage)) {
- List<Map<String, String>> tableData = rObjectGenerator.
- generateTable(tableName, requestedTable,
- request.getParameterMap(), true);
- request.setAttribute(tableName.replaceAll("-", "_")
- + "_tabledata", tableData);
- }
- }
-
- /* Pass list of known countries in case we want to display them. */
- request.setAttribute("countries", this.knownCountries);
-
- /* Pass the default start and end dates. */
- SimpleDateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd");
- dateFormat.setTimeZone(TimeZone.getTimeZone("UTC"));
- Date defaultEndDate = new Date();
- Date defaultStartDate = new Date(defaultEndDate.getTime()
- - 90L * 24L * 60L * 60L * 1000L);
- request.setAttribute("default_start_date",
- dateFormat.format(defaultStartDate));
- request.setAttribute("default_end_date",
- dateFormat.format(defaultEndDate));
-
- /* Forward the request to the JSP that does all the hard work. */
- request.getRequestDispatcher(jsp).forward(request, response);
- }
-}
-
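The subpage servlet above exposes validated parameters to the JSP as request
attributes named after the graph, with dashes turned into underscores, plus a
ready-made "_url" query string for prepopulating the form. A brief sketch of
that naming convention (the graph name "direct-users" and the value "de" are
hypothetical examples):

  // Illustrative only; mirrors the attribute/URL handling in doGet() above.
  String requestedGraph = "direct-users";
  Map<String, String[]> checkedParameters = new HashMap<String, String[]>();
  checkedParameters.put("country", new String[] { "de" });
  StringBuilder urlBuilder = new StringBuilder();
  for (Map.Entry<String, String[]> param : checkedParameters.entrySet()) {
    String attributeName = requestedGraph.replaceAll("-", "_") + "_"
        + param.getKey();                                   // "direct_users_country"
    for (String paramValue : param.getValue()) {
      urlBuilder.append("&" + param.getKey() + "=" + paramValue);
    }
  }
  String url = "?" + urlBuilder.toString().substring(1);    // "?country=de"
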
diff --git a/website/src/org/torproject/ernie/web/graphs/RObject.java b/website/src/org/torproject/ernie/web/graphs/RObject.java
deleted file mode 100644
index cfab819..0000000
--- a/website/src/org/torproject/ernie/web/graphs/RObject.java
+++ /dev/null
@@ -1,23 +0,0 @@
-/* Copyright 2011, 2012 The Tor Project
- * See LICENSE for licensing information */
-package org.torproject.ernie.web.graphs;
-
-public class RObject {
- private byte[] bytes;
- private String fileName;
- private long lastModified;
- public RObject(byte[] bytes, String fileName, long lastModified) {
- this.bytes = bytes;
- this.fileName = fileName;
- this.lastModified = lastModified;
- }
- public String getFileName() {
- return this.fileName;
- }
- public byte[] getBytes() {
- return this.bytes;
- }
- public long getLastModified() {
- return this.lastModified;
- }
-}
diff --git a/website/src/org/torproject/ernie/web/graphs/RObjectGenerator.java b/website/src/org/torproject/ernie/web/graphs/RObjectGenerator.java
deleted file mode 100644
index 927b5c4..0000000
--- a/website/src/org/torproject/ernie/web/graphs/RObjectGenerator.java
+++ /dev/null
@@ -1,398 +0,0 @@
-/* Copyright 2011, 2012 The Tor Project
- * See LICENSE for licensing information */
-package org.torproject.ernie.web.graphs;
-
-import java.io.BufferedInputStream;
-import java.io.BufferedReader;
-import java.io.ByteArrayInputStream;
-import java.io.ByteArrayOutputStream;
-import java.io.File;
-import java.io.FileInputStream;
-import java.io.IOException;
-import java.io.InputStreamReader;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-import java.util.SortedSet;
-import java.util.TreeSet;
-
-import javax.servlet.ServletContext;
-import javax.servlet.ServletContextEvent;
-import javax.servlet.ServletContextListener;
-
-import org.rosuda.REngine.Rserve.RConnection;
-import org.rosuda.REngine.Rserve.RserveException;
-
-public class RObjectGenerator implements ServletContextListener {
-
- /* Host and port where Rserve is listening. */
- private String rserveHost;
- private int rservePort;
-
- /* Some parameters for our cache of graph images. */
- private String cachedGraphsDirectory;
- private long maxCacheAge;
-
- private SortedSet<String> availableCsvFiles;
- private Map<String, String> availableTables;
- private Map<String, String> availableGraphs;
- private Set<String> availableGraphFileTypes;
-
- public void contextInitialized(ServletContextEvent event) {
-
- /* Initialize using context parameters. */
- ServletContext servletContext = event.getServletContext();
- this.rserveHost = servletContext.getInitParameter("rserveHost");
- this.rservePort = Integer.parseInt(servletContext.getInitParameter(
- "rservePort"));
- this.maxCacheAge = Long.parseLong(servletContext.getInitParameter(
- "maxCacheAge"));
- this.cachedGraphsDirectory = servletContext.getInitParameter(
- "cachedGraphsDir");
-
- /* Initialize map of available CSV files. */
- this.availableCsvFiles = new TreeSet<String>();
- this.availableCsvFiles.add("bandwidth");
- this.availableCsvFiles.add("bandwidth-flags");
- this.availableCsvFiles.add("bwhist-flags");
- this.availableCsvFiles.add("connbidirect");
- this.availableCsvFiles.add("cloudbridges");
- this.availableCsvFiles.add("dirbytes");
- this.availableCsvFiles.add("monthly-userstats-average");
- this.availableCsvFiles.add("monthly-userstats-peak");
- this.availableCsvFiles.add("networksize");
- this.availableCsvFiles.add("platforms");
- this.availableCsvFiles.add("relaycountries");
- this.availableCsvFiles.add("relayflags");
- this.availableCsvFiles.add("torperf");
- this.availableCsvFiles.add("torperf-failures");
- this.availableCsvFiles.add("userstats");
- this.availableCsvFiles.add("userstats-detector");
- this.availableCsvFiles.add("versions");
-
- this.availableTables = new HashMap<String, String>();
- this.availableTables.put("userstats-relay", "start,end,filename");
- this.availableTables.put("userstats-bridge", "start,end,filename");
- this.availableTables.put("userstats-censorship-events",
- "start,end,filename");
- TableParameterChecker.getInstance().setAvailableTables(
- availableTables);
-
- this.availableGraphs = new HashMap<String, String>();
- this.availableGraphs.put("networksize", "start,end,filename");
- this.availableGraphs.put("cloudbridges", "start,end,filename");
- this.availableGraphs.put("relaycountries",
- "start,end,country,filename");
- this.availableGraphs.put("relayflags", "start,end,flag,filename");
- this.availableGraphs.put("versions", "start,end,filename");
- this.availableGraphs.put("platforms", "start,end,filename");
- this.availableGraphs.put("bandwidth", "start,end,filename");
- this.availableGraphs.put("bandwidth-flags", "start,end,filename");
- this.availableGraphs.put("bwhist-flags", "start,end,filename");
- this.availableGraphs.put("dirbytes", "start,end,filename");
- this.availableGraphs.put("torperf",
- "start,end,source,filesize,filename");
- this.availableGraphs.put("torperf-failures",
- "start,end,source,filesize,filename");
- this.availableGraphs.put("connbidirect", "start,end,filename");
- this.availableGraphs.put("fast-exits", "start,end,filename");
- this.availableGraphs.put("almost-fast-exits", "start,end,filename");
- this.availableGraphs.put("userstats-relay-country",
- "start,end,country,events,filename");
- this.availableGraphs.put("userstats-bridge-country",
- "start,end,country,filename");
- this.availableGraphs.put("userstats-bridge-transport",
- "start,end,transport,filename");
- this.availableGraphs.put("userstats-bridge-version",
- "start,end,version,filename");
- this.availableGraphFileTypes = new HashSet<String>(Arrays.asList(
- "png,pdf,svg".split(",")));
- GraphParameterChecker.getInstance().setAvailableGraphs(
- availableGraphs);
-
- /* Register ourself, so that servlets can use us. */
- servletContext.setAttribute("RObjectGenerator", this);
-
- /* Periodically generate R objects with default parameters. */
- new Thread() {
- public void run() {
- long lastUpdated = 0L, sleep;
- while (true) {
- while ((sleep = maxCacheAge * 1000L / 2L + lastUpdated
- - System.currentTimeMillis()) > 0L) {
- try {
- Thread.sleep(sleep);
- } catch (InterruptedException e) {
- }
- }
- for (String csvFile : availableCsvFiles) {
- generateCsv(csvFile, false);
- }
- for (String tableName : availableTables.keySet()) {
- generateTable(tableName, tableName, new HashMap(), false);
- }
- for (String graphName : availableGraphs.keySet()) {
- for (String fileType : availableGraphFileTypes) {
- generateGraph(graphName, fileType, new HashMap(), false);
- }
- }
- lastUpdated = System.currentTimeMillis();
- }
- };
- }.start();
- }
-
- public void contextDestroyed(ServletContextEvent event) {
- /* Nothing to do. */
- }
-
- public RObject generateGraph(String requestedGraph, String fileType,
- Map parameterMap, boolean checkCache) {
- Map<String, String[]> checkedParameters = GraphParameterChecker.
- getInstance().checkParameters(requestedGraph, parameterMap);
- if (checkedParameters == null) {
- /* TODO We're going to take the blame by sending an internal server
- * error to the client, but really the user is to blame. */
- return null;
- }
- StringBuilder rQueryBuilder = new StringBuilder("plot_"
- + requestedGraph.replaceAll("-", "_") + "("),
- imageFilenameBuilder = new StringBuilder(requestedGraph);
- for (Map.Entry<String, String[]> parameter :
- checkedParameters.entrySet()) {
- String parameterName = parameter.getKey();
- String[] parameterValues = parameter.getValue();
- for (String param : parameterValues) {
- imageFilenameBuilder.append("-" + param);
- }
- if (parameterValues.length < 2) {
- rQueryBuilder.append(parameterName + " = '" + parameterValues[0]
- + "', ");
- } else {
- rQueryBuilder.append(parameterName + " = c(");
- for (int i = 0; i < parameterValues.length - 1; i++) {
- rQueryBuilder.append("'" + parameterValues[i] + "', ");
- }
- rQueryBuilder.append("'" + parameterValues[
- parameterValues.length - 1] + "'), ");
- }
- }
- imageFilenameBuilder.append("." + fileType);
- String imageFilename = imageFilenameBuilder.toString();
- rQueryBuilder.append("path = '%s')");
- String rQuery = rQueryBuilder.toString();
- File imageFile = new File(this.cachedGraphsDirectory + "/"
- + imageFilename);
- return this.generateRObject(rQuery, imageFile, imageFilename,
- checkCache);
- }
-
- public SortedSet<String> getAvailableCsvFiles() {
- return this.availableCsvFiles;
- }
-
- public RObject generateCsv(String requestedCsvFile,
- boolean checkCache) {
- /* Prepare filename and R query string. */
- String rQuery = "export_" + requestedCsvFile.replaceAll("-", "_")
- + "(path = '%s')";
- String csvFilename = requestedCsvFile + ".csv";
-
- /* See if we need to generate this .csv file. */
- File csvFile = new File(this.cachedGraphsDirectory + "/"
- + csvFilename);
- return this.generateRObject(rQuery, csvFile, csvFilename, checkCache);
- }
-
- public List<Map<String, String>> generateTable(String tableName,
- String requestedTable, Map parameterMap, boolean checkCache) {
-
- Map<String, String[]> checkedParameters = null;
- if (tableName.equals(requestedTable)) {
- checkedParameters = TableParameterChecker.
- getInstance().checkParameters(requestedTable,
- parameterMap);
- } else {
- checkedParameters = TableParameterChecker.
- getInstance().checkParameters(tableName, null);
- }
- if (checkedParameters == null) {
- /* TODO We're going to take the blame by sending an internal server
- * error to the client, but really the user is to blame. */
- return null;
- }
- StringBuilder rQueryBuilder = new StringBuilder("write_"
- + tableName.replaceAll("-", "_") + "("),
- tableFilenameBuilder = new StringBuilder(tableName);
-
- for (Map.Entry<String, String[]> parameter :
- checkedParameters.entrySet()) {
- String parameterName = parameter.getKey();
- String[] parameterValues = parameter.getValue();
- for (String param : parameterValues) {
- tableFilenameBuilder.append("-" + param);
- }
- if (parameterValues.length < 2) {
- rQueryBuilder.append(parameterName + " = '"
- + parameterValues[0] + "', ");
- } else {
- rQueryBuilder.append(parameterName + " = c(");
- for (int i = 0; i < parameterValues.length - 1; i++) {
- rQueryBuilder.append("'" + parameterValues[i] + "', ");
- }
- rQueryBuilder.append("'" + parameterValues[
- parameterValues.length - 1] + "'), ");
- }
- }
- tableFilenameBuilder.append(".tbl");
- String tableFilename = tableFilenameBuilder.toString();
- rQueryBuilder.append("path = '%s')");
- String rQuery = rQueryBuilder.toString();
- return this.generateTable(rQuery, tableFilename, checkCache);
- }
-
- /* Generate table data using the given R query and filename or read
- * previously generated table data from disk if it's not too old and
- * return table data. */
- private List<Map<String, String>> generateTable(String rQuery,
- String tableFilename, boolean checkCache) {
-
- /* See if we need to generate this table. */
- File tableFile = new File(this.cachedGraphsDirectory + "/"
- + tableFilename);
- byte[] tableBytes = this.generateRObject(rQuery, tableFile,
- tableFilename, checkCache).getBytes();
-
- /* Write the table content to a map. */
- List<Map<String, String>> result = null;
- try {
- result = new ArrayList<Map<String, String>>();
- BufferedReader br = new BufferedReader(new InputStreamReader(
- new ByteArrayInputStream(tableBytes)));
- String line = br.readLine();
- if (line != null) {
- List<String> headers = new ArrayList<String>(Arrays.asList(
- line.split(",")));
- while ((line = br.readLine()) != null) {
- String[] parts = line.split(",");
- if (headers.size() != parts.length) {
- return null;
- }
- Map<String, String> row = new HashMap<String, String>();
- for (int i = 0; i < headers.size(); i++) {
- row.put(headers.get(i), parts[i]);
- }
- result.add(row);
- }
- }
- } catch (IOException e) {
- return null;
- }
-
- /* Return table values. */
- return result;
- }
-
- /* Generate an R object in a separate worker thread, or wait for an
- * already running worker thread to finish and get its result. */
- private RObject generateRObject(String rQuery, File rObjectFile,
- String fileName, boolean checkCache) {
- RObjectGeneratorWorker worker = null;
- synchronized (this.rObjectGeneratorThreads) {
- if (this.rObjectGeneratorThreads.containsKey(rQuery)) {
- worker = this.rObjectGeneratorThreads.get(rQuery);
- } else {
- worker = new RObjectGeneratorWorker(rQuery, rObjectFile,
- fileName, checkCache);
- this.rObjectGeneratorThreads.put(rQuery, worker);
- worker.start();
- }
- }
- try {
- worker.join();
- } catch (InterruptedException e) {
- }
- synchronized (this.rObjectGeneratorThreads) {
- if (this.rObjectGeneratorThreads.containsKey(rQuery) &&
- this.rObjectGeneratorThreads.get(rQuery) == worker) {
- this.rObjectGeneratorThreads.remove(rQuery);
- }
- }
- return worker.getRObject();
- }
-
- private Map<String, RObjectGeneratorWorker> rObjectGeneratorThreads =
- new HashMap<String, RObjectGeneratorWorker>();
-
- private class RObjectGeneratorWorker extends Thread {
-
- private String rQuery;
- private File rObjectFile;
- private String fileName;
- private boolean checkCache;
- private RObject result = null;
-
- public RObjectGeneratorWorker(String rQuery, File rObjectFile,
- String fileName, boolean checkCache) {
- this.rQuery = rQuery;
- this.rObjectFile = rObjectFile;
- this.fileName = fileName;
- this.checkCache = checkCache;
- }
-
- public void run() {
-
- /* See if we need to generate this R object. */
- long now = System.currentTimeMillis();
- if (!this.checkCache || !this.rObjectFile.exists() ||
- this.rObjectFile.lastModified() < now - maxCacheAge * 1000L) {
-
- /* We do. Update the R query to contain the absolute path to the
- * file to be generated, create a connection to Rserve, run the R
- * query, and close the connection. The generated object will be
- * on disk. */
- this.rQuery = String.format(this.rQuery,
- this.rObjectFile.getAbsolutePath());
- try {
- RConnection rc = new RConnection(rserveHost, rservePort);
- rc.eval(this.rQuery);
- rc.close();
- } catch (RserveException e) {
- return;
- }
-
- /* Check that we really just generated the R object. */
- if (!this.rObjectFile.exists() || this.rObjectFile.lastModified()
- < now - maxCacheAge * 1000L) {
- return;
- }
- }
-
- /* Read the R object from disk and write it to a byte array. */
- long lastModified = this.rObjectFile.lastModified();
- try {
- BufferedInputStream bis = new BufferedInputStream(
- new FileInputStream(this.rObjectFile), 1024);
- ByteArrayOutputStream baos = new ByteArrayOutputStream();
- byte[] buffer = new byte[1024];
- int length;
- while ((length = bis.read(buffer)) > 0) {
- baos.write(buffer, 0, length);
- }
- bis.close();
- this.result = new RObject(baos.toByteArray(), this.fileName,
- lastModified);
- } catch (IOException e) {
- return;
- }
- }
-
- public RObject getRObject() {
- return this.result;
- }
- }
-}
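The generator above avoids running the same R query twice in parallel: the
first request for a given query starts a worker thread, later requests join
that same thread, and whichever caller still finds the entry in the map
afterwards removes it. A stripped-down sketch of that deduplication pattern
(class and method names, and the placeholder work in run(), are hypothetical
and not part of this commit):

  import java.util.HashMap;
  import java.util.Map;

  public class WorkerDeduplicator {
    private final Map<String, Worker> workers = new HashMap<String, Worker>();

    public String compute(String key) {
      Worker worker;
      synchronized (this.workers) {
        worker = this.workers.get(key);
        if (worker == null) {
          worker = new Worker(key);
          this.workers.put(key, worker);
          worker.start();
        }
      }
      try {
        worker.join();                       /* wait for the shared result */
      } catch (InterruptedException e) {
        Thread.currentThread().interrupt();
      }
      synchronized (this.workers) {
        if (this.workers.get(key) == worker) {
          this.workers.remove(key);          /* first finisher cleans up */
        }
      }
      return worker.getResult();
    }

    private static class Worker extends Thread {
      private final String key;
      private volatile String result;
      Worker(String key) { this.key = key; }
      public void run() { this.result = "result for " + this.key; }
      String getResult() { return this.result; }
    }
  }
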
diff --git a/website/src/org/torproject/ernie/web/graphs/TableParameterChecker.java b/website/src/org/torproject/ernie/web/graphs/TableParameterChecker.java
deleted file mode 100644
index 02a0c85..0000000
--- a/website/src/org/torproject/ernie/web/graphs/TableParameterChecker.java
+++ /dev/null
@@ -1,120 +0,0 @@
-/* Copyright 2011, 2012 The Tor Project
- * See LICENSE for licensing information */
-package org.torproject.ernie.web.graphs;
-
-import java.text.ParseException;
-import java.text.SimpleDateFormat;
-import java.util.Arrays;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.Map;
-import java.util.Set;
-import java.util.TimeZone;
-
-/**
- * Checks request parameters passed to generate tables.
- */
-public class TableParameterChecker {
-
- /**
- * Singleton instance of this class.
- */
- private static TableParameterChecker instance =
- new TableParameterChecker();
-
- /**
- * Returns the singleton instance of this class.
- */
- public static TableParameterChecker getInstance() {
- return instance;
- }
-
- /* Date format for parsing start and end dates. */
- private SimpleDateFormat dateFormat;
-
- /* Available tables with corresponding parameter lists. */
- private Map<String, String> availableTables;
-
- /**
- * Initializes map with valid parameters for each of the graphs.
- */
- public TableParameterChecker() {
- this.dateFormat = new SimpleDateFormat("yyyy-MM-dd");
- this.dateFormat.setTimeZone(TimeZone.getTimeZone("UTC"));
- }
-
- public void setAvailableTables(Map<String, String> availableTables) {
- this.availableTables = availableTables;
- }
-
- /**
- * Checks request parameters for the given table type and returns a map
- * of recognized parameters, or null if the table type doesn't exist or
- * the parameters are invalid.
- */
- public Map<String, String[]> checkParameters(String tableType,
- Map requestParameters) {
-
- /* Check if the table type exists. */
- if (tableType == null ||
- !this.availableTables.containsKey(tableType)) {
- return null;
- }
-
- /* Find out which other parameters are supported by this table type
- * and parse them if they are given. */
- Set<String> supportedTableParameters = new HashSet<String>(Arrays.
- asList(this.availableTables.get(tableType).split(",")));
- Map<String, String[]> recognizedTableParameters =
- new HashMap<String, String[]>();
-
- /* Parse start and end dates if supported by the table type. If no end
- * date is provided, set it to today. If no start date is provided,
- * set it to 90 days before the end date. Make sure that start date
- * precedes end date. */
- if (supportedTableParameters.contains("start") ||
- supportedTableParameters.contains("end")) {
- String[] startParameter = null;
- String[] endParameter = null;
- if (requestParameters != null) {
- startParameter = (String[]) requestParameters.get("start");
- endParameter = (String[]) requestParameters.get("end");
- }
- long endTimestamp = System.currentTimeMillis();
- if (endParameter != null && endParameter.length > 0 &&
- endParameter[0].length() > 0) {
- try {
- endTimestamp = dateFormat.parse(endParameter[0]).getTime();
- } catch (ParseException e) {
- return null;
- }
- if (!endParameter[0].startsWith("20")) {
- return null;
- }
- }
- endParameter = new String[] { dateFormat.format(endTimestamp) };
- long startTimestamp = endTimestamp - 90L * 24L * 60L * 60L * 1000L;
- if (startParameter != null && startParameter.length > 0 &&
- startParameter[0].length() > 0) {
- try {
- startTimestamp = dateFormat.parse(startParameter[0]).getTime();
- } catch (ParseException e) {
- return null;
- }
- if (!startParameter[0].startsWith("20")) {
- return null;
- }
- }
- startParameter = new String[] { dateFormat.format(startTimestamp) };
- if (startTimestamp > endTimestamp) {
- return null;
- }
- recognizedTableParameters.put("start", startParameter);
- recognizedTableParameters.put("end", endParameter);
- }
-
- /* We now have a map with all required table parameters. Return it. */
- return recognizedTableParameters;
- }
-}
-
diff --git a/website/src/org/torproject/ernie/web/research/ResearchDataServlet.java b/website/src/org/torproject/ernie/web/research/ResearchDataServlet.java
deleted file mode 100644
index 6c24e0d..0000000
--- a/website/src/org/torproject/ernie/web/research/ResearchDataServlet.java
+++ /dev/null
@@ -1,260 +0,0 @@
-/* Copyright 2011, 2012 The Tor Project
- * See LICENSE for licensing information */
-package org.torproject.ernie.web.research;
-
-import java.io.File;
-import java.io.IOException;
-import java.text.ParseException;
-import java.text.SimpleDateFormat;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Date;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.SortedMap;
-import java.util.TreeMap;
-
-import javax.servlet.ServletException;
-import javax.servlet.http.HttpServlet;
-import javax.servlet.http.HttpServletRequest;
-import javax.servlet.http.HttpServletResponse;
-
-/**
- * Controller servlet for the Data page. Prepares the various lists of
- * downloadable metrics data files by parsing a file with URLs on other
- * servers and looking at a local directory with files served by local
- * Apache HTTP server. The file with URLs on other servers may contain
- * comment lines starting with #. Recognizes metrics data file types from
- * the file names.
- */
-public class ResearchDataServlet extends HttpServlet {
-
- private static final long serialVersionUID = -5168280373350515577L;
-
- public void doGet(HttpServletRequest request,
- HttpServletResponse response) throws IOException, ServletException {
-
- /* Read local directory with files served by the local Apache HTTP
- * server and add the URLs to the list. */
- List<String> dataFileUrls = new ArrayList<String>();
- String localDataDir = getServletConfig().getInitParameter(
- "localDataDir");
- if (localDataDir != null) {
- try {
- File localDataDirFile = new File(localDataDir);
- if (localDataDirFile.exists() && localDataDirFile.isDirectory()) {
- for (File localDataFile : localDataDirFile.listFiles()) {
- if (!localDataFile.isDirectory()) {
- dataFileUrls.add("/data/" + localDataFile.getName());
- }
- }
- }
- } catch (SecurityException e) {
- /* We're not permitted to read the directory with metrics data
- * files. Ignore. */
- }
- }
-
- /* Prepare data structures that we're going to pass to the JSP. All
- * data structures are (nested) maps with the map keys being used for
- * displaying the files in tables and map values being 2-element
- * arrays containing the file url and optional signature file. */
- SortedMap<Date, Map<String, String[]>> relayDescriptors =
- new TreeMap<Date, Map<String, String[]>>(
- java.util.Collections.reverseOrder());
- String[] certs = new String[2];
- SortedMap<Date, String[]> bridgeDescriptors =
- new TreeMap<Date, String[]>(java.util.Collections.reverseOrder());
- String[] relayStatistics = new String[2];
- SortedMap<Date, String[]> torperfTarballs =
- new TreeMap<Date, String[]>(java.util.Collections.reverseOrder());
- SortedMap<String, Map<String, String[]>> torperfData =
- new TreeMap<String, Map<String, String[]>>();
- SortedMap<Date, String[]> exitLists =
- new TreeMap<Date, String[]>(java.util.Collections.reverseOrder());
- SortedMap<Date, String[]> torperfExperiments =
- new TreeMap<Date, String[]>();
- SortedMap<Date, String[]> bridgePoolAssignments =
- new TreeMap<Date, String[]>(java.util.Collections.reverseOrder());
-
- /* Prepare rewriting Torperf sources. */
- Map<String, String> torperfSources = new HashMap<String, String>();
- torperfSources.put("torperffast", "torperf, fastest");
- torperfSources.put("torperffastratio", "torperf, best ratio");
- torperfSources.put("torperfslow", "torperf, slowest");
- torperfSources.put("torperfslowratio", "torperf, worst ratio");
-
- /* Go through the file list, decide for each file what metrics data
- * type it is, and put it in the appropriate map. */
- SimpleDateFormat monthFormat = new SimpleDateFormat("yyyy-MM");
- SimpleDateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd");
- List<String> torperfFilesizes = Arrays.asList("50kb,1mb,5mb".
- split(","));
- for (String url : dataFileUrls) {
- if (!url.contains("/")) {
- continue;
- }
- String filename = url.substring(url.lastIndexOf("/") + 1);
-
- /* URL contains relay descriptors. */
- if (filename.startsWith("tor-20") ||
- filename.startsWith("statuses-20") ||
- filename.startsWith("server-descriptors-20") ||
- filename.startsWith("extra-infos-20") ||
- filename.startsWith("votes-20") ||
- filename.startsWith("consensuses-20")) {
- String type = filename.substring(0, filename.indexOf("-20"));
- String yearMonth = filename.substring(filename.indexOf("20"));
- yearMonth = yearMonth.substring(0, 7);
- Date month = null;
- try {
- month = monthFormat.parse(yearMonth);
- } catch (ParseException e) {
- /* Ignore this URL. */
- continue;
- }
- int index = filename.endsWith(".asc") ? 1 : 0;
- if (!relayDescriptors.containsKey(month)) {
- relayDescriptors.put(month, new HashMap<String, String[]>());
- }
- if (!relayDescriptors.get(month).containsKey(type)) {
- relayDescriptors.get(month).put(type, new String[2]);
- }
- relayDescriptors.get(month).get(type)[index] = url;
-
- /* URL contains v3 certificates. */
- } else if (filename.startsWith("certs.tar")) {
- int index = filename.endsWith(".asc") ? 1 : 0;
- certs[index] = url;
-
- /* URL contains bridge descriptors. */
- } else if (filename.startsWith("bridge-descriptors-20")) {
- String yearMonth = filename.substring(filename.indexOf("20"));
- yearMonth = yearMonth.substring(0, 7);
- Date month = null;
- try {
- month = monthFormat.parse(yearMonth);
- } catch (ParseException e) {
- /* Ignore this URL. */
- continue;
- }
- int index = filename.endsWith(".asc") ? 1 : 0;
- if (!bridgeDescriptors.containsKey(month)) {
- bridgeDescriptors.put(month, new String[2]);
- }
- bridgeDescriptors.get(month)[index] = url;
-
- /* URL contains relay statistics. */
- } else if (filename.startsWith("relay-statistics.tar.bz2")) {
- int index = filename.endsWith(".asc") ? 1 : 0;
- relayStatistics[index] = url;
-
- /* URL contains Torperf tarball. */
- } else if (filename.startsWith("torperf-20")) {
- String yearMonth = filename.substring(filename.indexOf("20"));
- yearMonth = yearMonth.substring(0, 7);
- Date month = null;
- try {
- month = monthFormat.parse(yearMonth);
- } catch (ParseException e) {
- /* Ignore this URL. */
- continue;
- }
- if (!torperfTarballs.containsKey(month)) {
- torperfTarballs.put(month, new String[2]);
- }
- torperfTarballs.get(month)[0] = url;
-
- /* URL contains Torperf data file. */
- } else if (filename.endsWith("b.data") ||
- filename.endsWith("b.extradata")) {
- boolean isExtraData = filename.endsWith("b.extradata");
- String[] parts = filename.split("-");
- if (parts.length != 2) {
- continue;
- }
- String source = parts[0];
- if (torperfSources.containsKey(source)) {
- source = torperfSources.get(source);
- }
- String filesize = parts[1];
- filesize = filesize.substring(0, filesize.length()
- - (isExtraData ? 10 : 5));
- if (!torperfFilesizes.contains(filesize)) {
- continue;
- }
- if (!torperfData.containsKey(source)) {
- torperfData.put(source, new HashMap<String, String[]>());
- }
- if (!torperfData.get(source).containsKey(filesize)) {
- torperfData.get(source).put(filesize, new String[2]);
- }
- torperfData.get(source).get(filesize)[isExtraData ? 1 : 0] = url;
-
- /* URL contains Torperf experiment tarball. */
- } else if (filename.startsWith("torperf-experiment-20")) {
- String dateString = filename.substring(filename.indexOf("20"));
- dateString = dateString.substring(0, 10);
- Date date = null;
- try {
- date = dateFormat.parse(dateString);
- } catch (ParseException e) {
- /* Ignore this URL. */
- continue;
- }
- if (!torperfExperiments.containsKey(date)) {
- torperfExperiments.put(date, new String[2]);
- }
- torperfExperiments.get(date)[0] = url;
-
- /* URL contains exit list. */
- } else if (filename.startsWith("exit-list-20")) {
- String yearMonth = filename.substring(filename.indexOf("20"));
- yearMonth = yearMonth.substring(0, 7);
- Date month = null;
- try {
- month = monthFormat.parse(yearMonth);
- } catch (ParseException e) {
- /* Ignore this URL. */
- continue;
- }
- if (!exitLists.containsKey(month)) {
- exitLists.put(month, new String[2]);
- }
- exitLists.get(month)[0] = url;
-
- /* URL contains bridge pool assignments. */
- } else if (filename.startsWith("bridge-pool-assignments-20")) {
- String yearMonth = filename.substring(filename.indexOf("20"));
- yearMonth = yearMonth.substring(0, 7);
- Date month = null;
- try {
- month = monthFormat.parse(yearMonth);
- } catch (ParseException e) {
- /* Ignore this URL. */
- continue;
- }
- if (!bridgePoolAssignments.containsKey(month)) {
- bridgePoolAssignments.put(month, new String[2]);
- }
- bridgePoolAssignments.get(month)[0] = url;
- }
- }
-
- /* Add the maps to the request and forward it to the JSP to display
- * the page. */
- request.setAttribute("relayDescriptors", relayDescriptors);
- request.setAttribute("certs", certs);
- request.setAttribute("bridgeDescriptors", bridgeDescriptors);
- request.setAttribute("relayStatistics", relayStatistics);
- request.setAttribute("torperfData", torperfData);
- request.setAttribute("exitLists", exitLists);
- request.setAttribute("torperfTarballs", torperfTarballs);
- request.setAttribute("torperfExperiments", torperfExperiments);
- request.setAttribute("bridgePoolAssignments", bridgePoolAssignments);
- request.getRequestDispatcher("WEB-INF/data.jsp").forward(request,
- response);
- }
-}
-
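The Data page servlet above recognizes metrics data file types purely from
their file names and groups them by month. A small illustration of that
parsing (the file name below is a hypothetical example, not taken from this
commit):

  import java.text.SimpleDateFormat;
  import java.util.Date;

  public class DataFileNameExample {
    public static void main(String[] args) throws Exception {
      String filename = "consensuses-2013-12.tar.bz2";
      String type = filename.substring(0, filename.indexOf("-20"));  // "consensuses"
      String yearMonth = filename.substring(filename.indexOf("20"))
          .substring(0, 7);                                          // "2013-12"
      Date month = new SimpleDateFormat("yyyy-MM").parse(yearMonth);
      int index = filename.endsWith(".asc") ? 1 : 0;                 // 1 = signature file
      System.out.println(type + " / " + month + " / index " + index);
    }
  }
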
diff --git a/website/src/org/torproject/ernie/web/research/ResearchFormatsServlet.java b/website/src/org/torproject/ernie/web/research/ResearchFormatsServlet.java
deleted file mode 100644
index 3b70ca3..0000000
--- a/website/src/org/torproject/ernie/web/research/ResearchFormatsServlet.java
+++ /dev/null
@@ -1,24 +0,0 @@
-/* Copyright 2011, 2012 The Tor Project
- * See LICENSE for licensing information */
-package org.torproject.ernie.web.research;
-
-import java.io.IOException;
-
-import javax.servlet.ServletException;
-import javax.servlet.http.HttpServlet;
-import javax.servlet.http.HttpServletRequest;
-import javax.servlet.http.HttpServletResponse;
-
-public class ResearchFormatsServlet extends HttpServlet {
-
- private static final long serialVersionUID = 5666493868675314116L;
-
- public void doGet(HttpServletRequest request,
- HttpServletResponse response) throws IOException, ServletException {
-
- /* Forward the request to the JSP that does all the hard work. */
- request.getRequestDispatcher("WEB-INF/formats.jsp").forward(request,
- response);
- }
-}
-
diff --git a/website/src/org/torproject/ernie/web/research/ResearchPapersServlet.java b/website/src/org/torproject/ernie/web/research/ResearchPapersServlet.java
deleted file mode 100644
index a63eef0..0000000
--- a/website/src/org/torproject/ernie/web/research/ResearchPapersServlet.java
+++ /dev/null
@@ -1,24 +0,0 @@
-/* Copyright 2011, 2012 The Tor Project
- * See LICENSE for licensing information */
-package org.torproject.ernie.web.research;
-
-import java.io.IOException;
-
-import javax.servlet.ServletException;
-import javax.servlet.http.HttpServlet;
-import javax.servlet.http.HttpServletRequest;
-import javax.servlet.http.HttpServletResponse;
-
-public class ResearchPapersServlet extends HttpServlet {
-
- private static final long serialVersionUID = -8135459207158536268L;
-
- public void doGet(HttpServletRequest request,
- HttpServletResponse response) throws IOException, ServletException {
-
- /* Forward the request to the JSP that does all the hard work. */
- request.getRequestDispatcher("WEB-INF/papers.jsp").forward(request,
- response);
- }
-}
-
diff --git a/website/src/org/torproject/ernie/web/research/ResearchStatsServlet.java b/website/src/org/torproject/ernie/web/research/ResearchStatsServlet.java
deleted file mode 100644
index 2ca93a3..0000000
--- a/website/src/org/torproject/ernie/web/research/ResearchStatsServlet.java
+++ /dev/null
@@ -1,132 +0,0 @@
-/* Copyright 2013 The Tor Project
- * See LICENSE for licensing information */
-package org.torproject.ernie.web.research;
-
-import java.io.BufferedInputStream;
-import java.io.ByteArrayOutputStream;
-import java.io.File;
-import java.io.FileInputStream;
-import java.io.IOException;
-import java.util.SortedSet;
-import java.util.TreeSet;
-
-import javax.servlet.ServletConfig;
-import javax.servlet.ServletException;
-import javax.servlet.http.HttpServlet;
-import javax.servlet.http.HttpServletRequest;
-import javax.servlet.http.HttpServletResponse;
-
-public class ResearchStatsServlet extends HttpServlet {
-
- private static final long serialVersionUID = 3346710354297653810L;
-
- private File statsDir;
-
- private SortedSet<String> availableStatisticsFiles;
-
- public void init(ServletConfig config) throws ServletException {
- super.init(config);
- this.statsDir = new File(config.getInitParameter("statsDir"));
- this.availableStatisticsFiles = new TreeSet<String>();
- this.availableStatisticsFiles.add("servers");
- this.availableStatisticsFiles.add("bandwidth");
- this.availableStatisticsFiles.add("fast-exits");
- this.availableStatisticsFiles.add("clients");
- this.availableStatisticsFiles.add("torperf");
- this.availableStatisticsFiles.add("connbidirect");
- }
-
- public long getLastModified(HttpServletRequest request) {
- File statsFile = this.determineStatsFile(request);
- if (statsFile == null || !statsFile.exists()) {
- return -1L;
- } else {
- return statsFile.lastModified();
- }
- }
-
- public void doGet(HttpServletRequest request,
- HttpServletResponse response) throws IOException, ServletException {
- String requestURI = request.getRequestURI();
- if (requestURI.equals("/ernie/stats/")) {
- this.writeDirectoryListing(request, response);
- } else if (requestURI.equals("/ernie/stats.html")) {
- this.writeStatisticsPage(request, response);
- } else {
- File statsFile = this.determineStatsFile(request);
- if (statsFile == null) {
- response.sendError(HttpServletResponse.SC_NOT_FOUND);
- return;
- } else if (!this.writeStatsFile(statsFile, response)) {
- response.sendError(HttpServletResponse.SC_INTERNAL_SERVER_ERROR);
- }
- }
- }
-
- private void writeDirectoryListing(HttpServletRequest request,
- HttpServletResponse response) throws IOException, ServletException {
- request.setAttribute("directory", "/stats");
- request.setAttribute("extension", ".csv");
- request.setAttribute("files", this.availableStatisticsFiles);
- request.getRequestDispatcher("/WEB-INF/dir.jsp").forward(request,
- response);
- }
-
- private void writeStatisticsPage(HttpServletRequest request,
- HttpServletResponse response) throws IOException, ServletException {
- request.getRequestDispatcher("/WEB-INF/stats.jsp").forward(request,
- response);
- }
-
- private File determineStatsFile(HttpServletRequest request) {
- String requestedStatsFile = request.getRequestURI();
- if (requestedStatsFile.equals("/ernie/stats/") ||
- requestedStatsFile.equals("/ernie/stats.html")) {
- return null;
- }
- if (requestedStatsFile.endsWith(".csv")) {
- requestedStatsFile = requestedStatsFile.substring(0,
- requestedStatsFile.length() - ".csv".length());
- }
- if (requestedStatsFile.contains("/")) {
- requestedStatsFile = requestedStatsFile.substring(
- requestedStatsFile.lastIndexOf("/") + 1);
- }
- if (!availableStatisticsFiles.contains(requestedStatsFile)) {
- return null;
- } else {
- return new File(this.statsDir, requestedStatsFile + ".csv");
- }
- }
-
- private boolean writeStatsFile(File statsFile,
- HttpServletResponse response) throws IOException, ServletException {
- if (!statsFile.exists()) {
- return false;
- }
- byte[] statsFileBytes;
- try {
- BufferedInputStream bis = new BufferedInputStream(
- new FileInputStream(statsFile), 1024);
- ByteArrayOutputStream baos = new ByteArrayOutputStream();
- byte[] buffer = new byte[1024];
- int length;
- while ((length = bis.read(buffer)) > 0) {
- baos.write(buffer, 0, length);
- }
- bis.close();
- statsFileBytes = baos.toByteArray();
- } catch (IOException e) {
- return false;
- }
- String statsFileContent = new String(statsFileBytes);
- response.setContentType("text/csv");
- response.setHeader("Content-Length", String.valueOf(
- statsFileContent.length()));
- response.setHeader("Content-Disposition",
- "inline; filename=\"" + statsFile.getName() + "\"");
- response.getWriter().print(statsFileContent);
- return true;
- }
-}
-
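The stats servlet above overrides getLastModified(), which lets the servlet
container answer conditional GET requests (If-Modified-Since) with 304 Not
Modified based on the .csv file's timestamp, and it only resolves file names
from its whitelist. A brief sketch of the path handling (the request URI is a
hypothetical example):

  // Illustrative only; mirrors determineStatsFile() above.
  String requestedStatsFile = "/ernie/stats/servers.csv";
  if (requestedStatsFile.endsWith(".csv")) {
    requestedStatsFile = requestedStatsFile.substring(0,
        requestedStatsFile.length() - ".csv".length());
  }
  if (requestedStatsFile.contains("/")) {
    requestedStatsFile = requestedStatsFile.substring(
        requestedStatsFile.lastIndexOf("/") + 1);           // "servers"
  }
  // Only whitelisted names (servers, bandwidth, fast-exits, clients,
  // torperf, connbidirect) are then resolved to <statsDir>/<name>.csv.
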
diff --git a/website/src/org/torproject/ernie/web/research/ResearchToolsServlet.java b/website/src/org/torproject/ernie/web/research/ResearchToolsServlet.java
deleted file mode 100644
index 173a1da..0000000
--- a/website/src/org/torproject/ernie/web/research/ResearchToolsServlet.java
+++ /dev/null
@@ -1,24 +0,0 @@
-/* Copyright 2011, 2012 The Tor Project
- * See LICENSE for licensing information */
-package org.torproject.ernie.web.research;
-
-import java.io.IOException;
-
-import javax.servlet.ServletException;
-import javax.servlet.http.HttpServlet;
-import javax.servlet.http.HttpServletRequest;
-import javax.servlet.http.HttpServletResponse;
-
-public class ResearchToolsServlet extends HttpServlet {
-
- private static final long serialVersionUID = -3344204426180358872L;
-
- public void doGet(HttpServletRequest request,
- HttpServletResponse response) throws IOException, ServletException {
-
- /* Forward the request to the JSP that does all the hard work. */
- request.getRequestDispatcher("WEB-INF/tools.jsp").forward(request,
- response);
- }
-}
-
diff --git a/website/src/org/torproject/metrics/web/GraphsServlet.java b/website/src/org/torproject/metrics/web/GraphsServlet.java
new file mode 100644
index 0000000..eb5eaf4
--- /dev/null
+++ b/website/src/org/torproject/metrics/web/GraphsServlet.java
@@ -0,0 +1,24 @@
+/* Copyright 2011, 2012 The Tor Project
+ * See LICENSE for licensing information */
+package org.torproject.metrics.web;
+
+import java.io.IOException;
+
+import javax.servlet.ServletException;
+import javax.servlet.http.HttpServlet;
+import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpServletResponse;
+
+public class GraphsServlet extends HttpServlet {
+
+ private static final long serialVersionUID = 7615715032362498151L;
+
+ public void doGet(HttpServletRequest request,
+ HttpServletResponse response) throws IOException, ServletException {
+
+ /* Forward the request to the JSP that does all the hard work. */
+ request.getRequestDispatcher("WEB-INF/graphs.jsp").forward(request,
+ response);
+ }
+}
+
diff --git a/website/src/org/torproject/metrics/web/IndexServlet.java b/website/src/org/torproject/metrics/web/IndexServlet.java
new file mode 100644
index 0000000..c42b0e9
--- /dev/null
+++ b/website/src/org/torproject/metrics/web/IndexServlet.java
@@ -0,0 +1,24 @@
+/* Copyright 2011, 2012 The Tor Project
+ * See LICENSE for licensing information */
+package org.torproject.metrics.web;
+
+import java.io.IOException;
+
+import javax.servlet.ServletException;
+import javax.servlet.http.HttpServlet;
+import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpServletResponse;
+
+public class IndexServlet extends HttpServlet {
+
+ private static final long serialVersionUID = 7871368999788994664L;
+
+ public void doGet(HttpServletRequest request,
+ HttpServletResponse response) throws IOException, ServletException {
+
+ /* Forward the request to the JSP that does all the hard work. */
+ request.getRequestDispatcher("WEB-INF/index.jsp").forward(request,
+ response);
+ }
+}
+
diff --git a/website/src/org/torproject/metrics/web/ResearchServlet.java b/website/src/org/torproject/metrics/web/ResearchServlet.java
new file mode 100644
index 0000000..f69c45d
--- /dev/null
+++ b/website/src/org/torproject/metrics/web/ResearchServlet.java
@@ -0,0 +1,24 @@
+/* Copyright 2011, 2012 The Tor Project
+ * See LICENSE for licensing information */
+package org.torproject.metrics.web;
+
+import java.io.IOException;
+
+import javax.servlet.ServletException;
+import javax.servlet.http.HttpServlet;
+import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpServletResponse;
+
+public class ResearchServlet extends HttpServlet {
+
+ private static final long serialVersionUID = -9151727188925700665L;
+
+ public void doGet(HttpServletRequest request,
+ HttpServletResponse response) throws IOException, ServletException {
+
+ /* Forward the request to the JSP that does all the hard work. */
+ request.getRequestDispatcher("WEB-INF/research.jsp").forward(request,
+ response);
+ }
+}
+
diff --git a/website/src/org/torproject/metrics/web/StatusServlet.java b/website/src/org/torproject/metrics/web/StatusServlet.java
new file mode 100644
index 0000000..ec831df
--- /dev/null
+++ b/website/src/org/torproject/metrics/web/StatusServlet.java
@@ -0,0 +1,24 @@
+/* Copyright 2011, 2012 The Tor Project
+ * See LICENSE for licensing information */
+package org.torproject.metrics.web;
+
+import java.io.IOException;
+
+import javax.servlet.ServletException;
+import javax.servlet.http.HttpServlet;
+import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpServletResponse;
+
+public class StatusServlet extends HttpServlet {
+
+ private static final long serialVersionUID = -7249872082399236981L;
+
+ public void doGet(HttpServletRequest request,
+ HttpServletResponse response) throws IOException, ServletException {
+
+ /* Forward the request to the JSP that does all the hard work. */
+ request.getRequestDispatcher("WEB-INF/status.jsp").forward(request,
+ response);
+ }
+}
+
diff --git a/website/src/org/torproject/metrics/web/graphs/BubblesServlet.java b/website/src/org/torproject/metrics/web/graphs/BubblesServlet.java
new file mode 100644
index 0000000..f273194
--- /dev/null
+++ b/website/src/org/torproject/metrics/web/graphs/BubblesServlet.java
@@ -0,0 +1,24 @@
+/* Copyright 2013 The Tor Project
+ * See LICENSE for licensing information */
+package org.torproject.metrics.web.graphs;
+
+import java.io.IOException;
+
+import javax.servlet.ServletException;
+import javax.servlet.http.HttpServlet;
+import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpServletResponse;
+
+public class BubblesServlet extends HttpServlet {
+
+ private static final long serialVersionUID = -6011833075497881033L;
+
+ public void doGet(HttpServletRequest request,
+ HttpServletResponse response) throws IOException, ServletException {
+
+ /* Forward the request to the JSP that does all the hard work. */
+ request.getRequestDispatcher("WEB-INF/bubbles.jsp").forward(request,
+ response);
+ }
+}
+
diff --git a/website/src/org/torproject/metrics/web/graphs/Countries.java b/website/src/org/torproject/metrics/web/graphs/Countries.java
new file mode 100644
index 0000000..7a99a46
--- /dev/null
+++ b/website/src/org/torproject/metrics/web/graphs/Countries.java
@@ -0,0 +1,285 @@
+/* Copyright 2011, 2012 The Tor Project
+ * See LICENSE for licensing information */
+package org.torproject.metrics.web.graphs;
+
+import java.util.ArrayList;
+import java.util.List;
+
+public class Countries {
+
+ private static Countries instance = new Countries();
+
+ public static Countries getInstance() {
+ return Countries.instance;
+ }
+
+ /* List of arrays of length 2, containing country codes at [0] and
+ * country names at [1], alphabetically ordered by country names. */
+ private List<String[]> knownCountries;
+
+ private Countries() {
+ this.knownCountries = new ArrayList<String[]>();
+ this.knownCountries.add("af;Afghanistan".split(";"));
+ this.knownCountries.add("ax;Aland Islands".split(";"));
+ this.knownCountries.add("al;Albania".split(";"));
+ this.knownCountries.add("dz;Algeria".split(";"));
+ this.knownCountries.add("as;American Samoa".split(";"));
+ this.knownCountries.add("ad;Andorra".split(";"));
+ this.knownCountries.add("ao;Angola".split(";"));
+ this.knownCountries.add("ai;Anguilla".split(";"));
+ this.knownCountries.add("aq;Antarctica".split(";"));
+ this.knownCountries.add("ag;Antigua and Barbuda".split(";"));
+ this.knownCountries.add("ar;Argentina".split(";"));
+ this.knownCountries.add("am;Armenia".split(";"));
+ this.knownCountries.add("aw;Aruba".split(";"));
+ this.knownCountries.add("au;Australia".split(";"));
+ this.knownCountries.add("at;Austria".split(";"));
+ this.knownCountries.add("az;Azerbaijan".split(";"));
+ this.knownCountries.add("bs;Bahamas".split(";"));
+ this.knownCountries.add("bh;Bahrain".split(";"));
+ this.knownCountries.add("bd;Bangladesh".split(";"));
+ this.knownCountries.add("bb;Barbados".split(";"));
+ this.knownCountries.add("by;Belarus".split(";"));
+ this.knownCountries.add("be;Belgium".split(";"));
+ this.knownCountries.add("bz;Belize".split(";"));
+ this.knownCountries.add("bj;Benin".split(";"));
+ this.knownCountries.add("bm;Bermuda".split(";"));
+ this.knownCountries.add("bt;Bhutan".split(";"));
+ this.knownCountries.add("bo;Bolivia".split(";"));
+ this.knownCountries.add("ba;Bosnia and Herzegovina".split(";"));
+ this.knownCountries.add("bw;Botswana".split(";"));
+ this.knownCountries.add("bv;Bouvet Island".split(";"));
+ this.knownCountries.add("br;Brazil".split(";"));
+ this.knownCountries.add("io;British Indian Ocean Territory".
+ split(";"));
+ this.knownCountries.add("bn;Brunei".split(";"));
+ this.knownCountries.add("bg;Bulgaria".split(";"));
+ this.knownCountries.add("bf;Burkina Faso".split(";"));
+ this.knownCountries.add("mm;Burma".split(";"));
+ this.knownCountries.add("bi;Burundi".split(";"));
+ this.knownCountries.add("kh;Cambodia".split(";"));
+ this.knownCountries.add("cm;Cameroon".split(";"));
+ this.knownCountries.add("ca;Canada".split(";"));
+ this.knownCountries.add("cv;Cape Verde".split(";"));
+ this.knownCountries.add("ky;Cayman Islands".split(";"));
+ this.knownCountries.add("cf;Central African Republic".split(";"));
+ this.knownCountries.add("td;Chad".split(";"));
+ this.knownCountries.add("cl;Chile".split(";"));
+ this.knownCountries.add("cn;China".split(";"));
+ this.knownCountries.add("cx;Christmas Island".split(";"));
+ this.knownCountries.add("cc;Cocos (Keeling) Islands".split(";"));
+ this.knownCountries.add("co;Colombia".split(";"));
+ this.knownCountries.add("km;Comoros".split(";"));
+ this.knownCountries.add("cd;Congo, The Democratic Republic of the".
+ split(";"));
+ this.knownCountries.add("cg;Congo".split(";"));
+ this.knownCountries.add("ck;Cook Islands".split(";"));
+ this.knownCountries.add("cr;Costa Rica".split(";"));
+ this.knownCountries.add("ci:Côte d'Ivoire".split(":"));
+ this.knownCountries.add("hr;Croatia".split(";"));
+ this.knownCountries.add("cu;Cuba".split(";"));
+ this.knownCountries.add("cy;Cyprus".split(";"));
+ this.knownCountries.add("cz;Czech Republic".split(";"));
+ this.knownCountries.add("dk;Denmark".split(";"));
+ this.knownCountries.add("dj;Djibouti".split(";"));
+ this.knownCountries.add("dm;Dominica".split(";"));
+ this.knownCountries.add("do;Dominican Republic".split(";"));
+ this.knownCountries.add("ec;Ecuador".split(";"));
+ this.knownCountries.add("eg;Egypt".split(";"));
+ this.knownCountries.add("sv;El Salvador".split(";"));
+ this.knownCountries.add("gq;Equatorial Guinea".split(";"));
+ this.knownCountries.add("er;Eritrea".split(";"));
+ this.knownCountries.add("ee;Estonia".split(";"));
+ this.knownCountries.add("et;Ethiopia".split(";"));
+ this.knownCountries.add("fk;Falkland Islands (Malvinas)".split(";"));
+ this.knownCountries.add("fo;Faroe Islands".split(";"));
+ this.knownCountries.add("fj;Fiji".split(";"));
+ this.knownCountries.add("fi;Finland".split(";"));
+ this.knownCountries.add("fx;France, Metropolitan".split(";"));
+ this.knownCountries.add("fr;France".split(";"));
+ this.knownCountries.add("gf;French Guiana".split(";"));
+ this.knownCountries.add("pf;French Polynesia".split(";"));
+ this.knownCountries.add("tf;French Southern Territories".split(";"));
+ this.knownCountries.add("ga;Gabon".split(";"));
+ this.knownCountries.add("gm;Gambia".split(";"));
+ this.knownCountries.add("ge;Georgia".split(";"));
+ this.knownCountries.add("de;Germany".split(";"));
+ this.knownCountries.add("gh;Ghana".split(";"));
+ this.knownCountries.add("gi;Gibraltar".split(";"));
+ this.knownCountries.add("gr;Greece".split(";"));
+ this.knownCountries.add("gl;Greenland".split(";"));
+ this.knownCountries.add("gd;Grenada".split(";"));
+ this.knownCountries.add("gp;Guadeloupe".split(";"));
+ this.knownCountries.add("gu;Guam".split(";"));
+ this.knownCountries.add("gt;Guatemala".split(";"));
+ this.knownCountries.add("gg;Guernsey".split(";"));
+ this.knownCountries.add("gn;Guinea".split(";"));
+ this.knownCountries.add("gw;Guinea-Bissau".split(";"));
+ this.knownCountries.add("gy;Guyana".split(";"));
+ this.knownCountries.add("ht;Haiti".split(";"));
+ this.knownCountries.add("hm;Heard Island and McDonald Islands".
+ split(";"));
+ this.knownCountries.add("va;Vatican City".split(";"));
+ this.knownCountries.add("hn;Honduras".split(";"));
+ this.knownCountries.add("hk;Hong Kong".split(";"));
+ this.knownCountries.add("hu;Hungary".split(";"));
+ this.knownCountries.add("is;Iceland".split(";"));
+ this.knownCountries.add("in;India".split(";"));
+ this.knownCountries.add("id;Indonesia".split(";"));
+ this.knownCountries.add("ir;Iran".split(";"));
+ this.knownCountries.add("iq;Iraq".split(";"));
+ this.knownCountries.add("ie;Ireland".split(";"));
+ this.knownCountries.add("im;Isle of Man".split(";"));
+ this.knownCountries.add("il;Israel".split(";"));
+ this.knownCountries.add("it;Italy".split(";"));
+ this.knownCountries.add("jm;Jamaica".split(";"));
+ this.knownCountries.add("jp;Japan".split(";"));
+ this.knownCountries.add("je;Jersey".split(";"));
+ this.knownCountries.add("jo;Jordan".split(";"));
+ this.knownCountries.add("kz;Kazakhstan".split(";"));
+ this.knownCountries.add("ke;Kenya".split(";"));
+ this.knownCountries.add("ki;Kiribati".split(";"));
+ this.knownCountries.add("kp;North Korea".split(";"));
+ this.knownCountries.add("kr;Korea, Republic of".split(";"));
+ this.knownCountries.add("kw;Kuwait".split(";"));
+ this.knownCountries.add("kg;Kyrgyzstan".split(";"));
+ this.knownCountries.add("la;Laos".split(";"));
+ this.knownCountries.add("lv;Latvia".split(";"));
+ this.knownCountries.add("lb;Lebanon".split(";"));
+ this.knownCountries.add("ls;Lesotho".split(";"));
+ this.knownCountries.add("lr;Liberia".split(";"));
+ this.knownCountries.add("ly;Libya".split(";"));
+ this.knownCountries.add("li;Liechtenstein".split(";"));
+ this.knownCountries.add("lt;Lithuania".split(";"));
+ this.knownCountries.add("lu;Luxembourg".split(";"));
+ this.knownCountries.add("mo;Macau".split(";"));
+ this.knownCountries.add("mk;Macedonia".split(";"));
+ this.knownCountries.add("mg;Madagascar".split(";"));
+ this.knownCountries.add("mw;Malawi".split(";"));
+ this.knownCountries.add("my;Malaysia".split(";"));
+ this.knownCountries.add("mv;Maldives".split(";"));
+ this.knownCountries.add("ml;Mali".split(";"));
+ this.knownCountries.add("mt;Malta".split(";"));
+ this.knownCountries.add("mh;Marshall Islands".split(";"));
+ this.knownCountries.add("mq;Martinique".split(";"));
+ this.knownCountries.add("mr;Mauritania".split(";"));
+ this.knownCountries.add("mu;Mauritius".split(";"));
+ this.knownCountries.add("yt;Mayotte".split(";"));
+ this.knownCountries.add("mx;Mexico".split(";"));
+ this.knownCountries.add("fm;Micronesia, Federated States of".
+ split(";"));
+ this.knownCountries.add("md;Moldova, Republic of".split(";"));
+ this.knownCountries.add("mc;Monaco".split(";"));
+ this.knownCountries.add("mn;Mongolia".split(";"));
+ this.knownCountries.add("me;Montenegro".split(";"));
+ this.knownCountries.add("ms;Montserrat".split(";"));
+ this.knownCountries.add("ma;Morocco".split(";"));
+ this.knownCountries.add("mz;Mozambique".split(";"));
+ this.knownCountries.add("mm;Burma".split(";"));
+ this.knownCountries.add("na;Namibia".split(";"));
+ this.knownCountries.add("nr;Nauru".split(";"));
+ this.knownCountries.add("np;Nepal".split(";"));
+ this.knownCountries.add("an;Netherlands Antilles".split(";"));
+ this.knownCountries.add("nl;Netherlands".split(";"));
+ this.knownCountries.add("nc;New Caledonia".split(";"));
+ this.knownCountries.add("nz;New Zealand".split(";"));
+ this.knownCountries.add("ni;Nicaragua".split(";"));
+ this.knownCountries.add("ne;Niger".split(";"));
+ this.knownCountries.add("ng;Nigeria".split(";"));
+ this.knownCountries.add("nu;Niue".split(";"));
+ this.knownCountries.add("nf;Norfolk Island".split(";"));
+ this.knownCountries.add("mp;Northern Mariana Islands".split(";"));
+ this.knownCountries.add("no;Norway".split(";"));
+ this.knownCountries.add("om;Oman".split(";"));
+ this.knownCountries.add("pk;Pakistan".split(";"));
+ this.knownCountries.add("pw;Palau".split(";"));
+ this.knownCountries.add("ps;Palestinian Territory".split(";"));
+ this.knownCountries.add("pa;Panama".split(";"));
+ this.knownCountries.add("pg;Papua New Guinea".split(";"));
+ this.knownCountries.add("py;Paraguay".split(";"));
+ this.knownCountries.add("pe;Peru".split(";"));
+ this.knownCountries.add("ph;Philippines".split(";"));
+ this.knownCountries.add("pn;Pitcairn Islands".split(";"));
+ this.knownCountries.add("pl;Poland".split(";"));
+ this.knownCountries.add("pt;Portugal".split(";"));
+ this.knownCountries.add("pr;Puerto Rico".split(";"));
+ this.knownCountries.add("qa;Qatar".split(";"));
+ this.knownCountries.add("re;Reunion".split(";"));
+ this.knownCountries.add("ro;Romania".split(";"));
+ this.knownCountries.add("ru;Russia".split(";"));
+ this.knownCountries.add("rw;Rwanda".split(";"));
+ this.knownCountries.add("bl;Saint Bartelemey".split(";"));
+ this.knownCountries.add("sh;Saint Helena".split(";"));
+ this.knownCountries.add("kn;Saint Kitts and Nevis".split(";"));
+ this.knownCountries.add("lc;Saint Lucia".split(";"));
+ this.knownCountries.add("mf;Saint Martin".split(";"));
+ this.knownCountries.add("pm;Saint Pierre and Miquelon".split(";"));
+ this.knownCountries.add("vc;Saint Vincent and the Grenadines".
+ split(";"));
+ this.knownCountries.add("ws;Samoa".split(";"));
+ this.knownCountries.add("sm;San Marino".split(";"));
+ this.knownCountries.add("st:São Tomé and Príncipe".
+ split(":"));
+ this.knownCountries.add("sa;Saudi Arabia".split(";"));
+ this.knownCountries.add("sn;Senegal".split(";"));
+ this.knownCountries.add("rs;Serbia".split(";"));
+ this.knownCountries.add("sc;Seychelles".split(";"));
+ this.knownCountries.add("sl;Sierra Leone".split(";"));
+ this.knownCountries.add("sg;Singapore".split(";"));
+ this.knownCountries.add("sk;Slovakia".split(";"));
+ this.knownCountries.add("si;Slovenia".split(";"));
+ this.knownCountries.add("sb;Solomon Islands".split(";"));
+ this.knownCountries.add("so;Somalia".split(";"));
+ this.knownCountries.add("za;South Africa".split(";"));
+ this.knownCountries.add(("gs;South Georgia and the South Sandwich "
+ + "Islands").split(";"));
+ this.knownCountries.add("ss;South Sudan".split(";"));
+ this.knownCountries.add("es;Spain".split(";"));
+ this.knownCountries.add("lk;Sri Lanka".split(";"));
+ this.knownCountries.add("sd;Sudan".split(";"));
+ this.knownCountries.add("sr;Suriname".split(";"));
+ this.knownCountries.add("sj;Svalbard and Jan Mayen".split(";"));
+ this.knownCountries.add("sz;Swaziland".split(";"));
+ this.knownCountries.add("se;Sweden".split(";"));
+ this.knownCountries.add("ch;Switzerland".split(";"));
+ this.knownCountries.add("sy;Syrian Arab Republic".split(";"));
+ this.knownCountries.add("tw;Taiwan".split(";"));
+ this.knownCountries.add("tj;Tajikistan".split(";"));
+ this.knownCountries.add("tz;Tanzania, United Republic of".split(";"));
+ this.knownCountries.add("th;Thailand".split(";"));
+ this.knownCountries.add("tl;East Timor".split(";"));
+ this.knownCountries.add("tg;Togo".split(";"));
+ this.knownCountries.add("tk;Tokelau".split(";"));
+ this.knownCountries.add("to;Tonga".split(";"));
+ this.knownCountries.add("tt;Trinidad and Tobago".split(";"));
+ this.knownCountries.add("tn;Tunisia".split(";"));
+ this.knownCountries.add("tr;Turkey".split(";"));
+ this.knownCountries.add("tm;Turkmenistan".split(";"));
+ this.knownCountries.add("tc;Turks and Caicos Islands".split(";"));
+ this.knownCountries.add("tv;Tuvalu".split(";"));
+ this.knownCountries.add("ug;Uganda".split(";"));
+ this.knownCountries.add("ua;Ukraine".split(";"));
+ this.knownCountries.add("ae;United Arab Emirates".split(";"));
+ this.knownCountries.add("gb;United Kingdom".split(";"));
+ this.knownCountries.add("um;United States Minor Outlying Islands".
+ split(";"));
+ this.knownCountries.add("us;United States".split(";"));
+ this.knownCountries.add("uy;Uruguay".split(";"));
+ this.knownCountries.add("uz;Uzbekistan".split(";"));
+ this.knownCountries.add("vu;Vanuatu".split(";"));
+ this.knownCountries.add("ve;Venezuela".split(";"));
+ this.knownCountries.add("vn;Vietnam".split(";"));
+ this.knownCountries.add("vg;Virgin Islands, British".split(";"));
+ this.knownCountries.add("vi;Virgin Islands, U.S.".split(";"));
+ this.knownCountries.add("wf;Wallis and Futuna".split(";"));
+ this.knownCountries.add("eh;Western Sahara".split(";"));
+ this.knownCountries.add("ye;Yemen".split(";"));
+ this.knownCountries.add("zm;Zambia".split(";"));
+ this.knownCountries.add("zw;Zimbabwe".split(";"));
+ }
+
+ public List<String[]> getCountryList() {
+ return this.knownCountries;
+ }
+}
+
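Editor's note: Countries stores each entry as a two-element String array, code at [0] and name at [1], ordered by name for the country drop-downs. Callers that need to look up a name by code can flatten the list into a map once; a short sketch with a hypothetical helper class (not part of this commit):

    import java.util.LinkedHashMap;
    import java.util.Map;

    import org.torproject.metrics.web.graphs.Countries;

    public class CountryNames {

      /* Map country codes to display names, preserving the alphabetical
       * order of the underlying list. */
      public static Map<String, String> byCode() {
        Map<String, String> names = new LinkedHashMap<String, String>();
        for (String[] country : Countries.getInstance().getCountryList()) {
          names.put(country[0], country[1]);
        }
        return names;
      }

      public static void main(String[] args) {
        System.out.println(byCode().get("de"));  /* prints "Germany" */
      }
    }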
diff --git a/website/src/org/torproject/metrics/web/graphs/CsvServlet.java b/website/src/org/torproject/metrics/web/graphs/CsvServlet.java
new file mode 100644
index 0000000..96e5f6a
--- /dev/null
+++ b/website/src/org/torproject/metrics/web/graphs/CsvServlet.java
@@ -0,0 +1,97 @@
+/* Copyright 2011, 2012 The Tor Project
+ * See LICENSE for licensing information */
+package org.torproject.metrics.web.graphs;
+
+import java.io.IOException;
+import java.util.SortedSet;
+import java.util.logging.Logger;
+
+import javax.servlet.ServletException;
+import javax.servlet.http.HttpServlet;
+import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpServletResponse;
+
+/**
+ * Servlet that reads an HTTP request for a comma-separated value file,
+ * asks the RObjectGenerator to generate this file, and returns it to the
+ * client.
+ */
+public class CsvServlet extends HttpServlet {
+
+ private static final long serialVersionUID = 7501442926823719958L;
+
+ private RObjectGenerator rObjectGenerator;
+
+ /* Available CSV files. */
+ private SortedSet<String> availableCsvFiles;
+
+ private Logger logger;
+
+ public void init() {
+
+ /* Initialize logger. */
+ this.logger = Logger.getLogger(CsvServlet.class.toString());
+
+ /* Get a reference to the R object generator that we need to generate
+ * CSV files. */
+ this.rObjectGenerator = (RObjectGenerator) getServletContext().
+ getAttribute("RObjectGenerator");
+ this.availableCsvFiles = rObjectGenerator.getAvailableCsvFiles();
+ }
+
+ public void doGet(HttpServletRequest request,
+ HttpServletResponse response) throws IOException,
+ ServletException {
+
+ /* Check if the directory listing was requested. */
+ String requestURI = request.getRequestURI();
+ if (requestURI.equals("/ernie/csv/")) {
+ request.setAttribute("directory", "/csv");
+ request.setAttribute("extension", ".csv");
+ request.setAttribute("files", this.availableCsvFiles);
+ request.getRequestDispatcher("/WEB-INF/dir.jsp").forward(request,
+ response);
+ return;
+ }
+
+ /* Find out which CSV file was requested and make sure we know this
+ * CSV file type. */
+ String requestedCsvFile = requestURI;
+ if (requestedCsvFile.endsWith(".csv")) {
+ requestedCsvFile = requestedCsvFile.substring(0,
+ requestedCsvFile.length() - ".csv".length());
+ }
+ if (requestedCsvFile.contains("/")) {
+ requestedCsvFile = requestedCsvFile.substring(requestedCsvFile.
+ lastIndexOf("/") + 1);
+ }
+ if (!availableCsvFiles.contains(requestedCsvFile)) {
+ logger.info("Did not recognize requested .csv file from request "
+ + "URI: '" + requestURI + "'. Responding with 404 Not Found.");
+ response.sendError(HttpServletResponse.SC_NOT_FOUND);
+ return;
+ }
+ logger.fine("CSV file '" + requestedCsvFile + ".csv' requested.");
+
+ /* Request CSV file from R object generator, which asks Rserve to
+ * generate it. */
+ RObject csvFile = this.rObjectGenerator.generateCsv(
+ requestedCsvFile, true);
+
+ /* Make sure that we have a .csv file to return. */
+ if (csvFile == null) {
+ response.sendError(HttpServletResponse.SC_INTERNAL_SERVER_ERROR);
+ return;
+ }
+
+ /* Write CSV file to response. */
+ String csvFileContent = new String(csvFile.getBytes());
+ response.setContentType("text/csv");
+ response.setHeader("Content-Length", String.valueOf(
+ csvFileContent.length()));
+ response.setHeader("Content-Disposition",
+ "inline; filename=\"" + requestedCsvFile + ".csv\"");
+ response.getWriter().print(csvFileContent);
+ }
+}
+
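Editor's note: one detail in CsvServlet above: the Content-Length header is set from csvFileContent.length(), i.e. the number of characters, which equals the number of bytes only while the CSV data stays ASCII. A sketch of a variant that serves the cached bytes directly and advertises the length in bytes (hypothetical helper class, assuming UTF-8 output):

    import java.io.IOException;
    import java.io.OutputStream;

    import javax.servlet.http.HttpServletResponse;

    public class CsvResponses {

      /* Write a cached CSV file to the response, advertising the length
       * in bytes rather than characters. */
      static void writeCsv(HttpServletResponse response, String fileName,
          byte[] csvBytes) throws IOException {
        response.setContentType("text/csv");
        response.setCharacterEncoding("UTF-8");
        response.setContentLength(csvBytes.length);
        response.setHeader("Content-Disposition",
            "inline; filename=\"" + fileName + ".csv\"");
        OutputStream out = response.getOutputStream();
        out.write(csvBytes);
        out.flush();
      }
    }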
diff --git a/website/src/org/torproject/metrics/web/graphs/GraphImageServlet.java b/website/src/org/torproject/metrics/web/graphs/GraphImageServlet.java
new file mode 100644
index 0000000..08f256a
--- /dev/null
+++ b/website/src/org/torproject/metrics/web/graphs/GraphImageServlet.java
@@ -0,0 +1,76 @@
+/* Copyright 2011, 2012 The Tor Project
+ * See LICENSE for licensing information */
+package org.torproject.metrics.web.graphs;
+
+import java.io.BufferedOutputStream;
+import java.io.IOException;
+
+import javax.servlet.ServletException;
+import javax.servlet.http.HttpServlet;
+import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpServletResponse;
+
+/**
+ * Servlet that reads an HTTP request for a graph image, asks the
+ * RObjectGenerator to generate this graph if it's not in the cache, and
+ * returns the image bytes to the client.
+ */
+public class GraphImageServlet extends HttpServlet {
+
+ private static final long serialVersionUID = -7356818641689744288L;
+
+ private RObjectGenerator rObjectGenerator;
+
+ public void init() {
+
+ /* Get a reference to the R object generator that we need to generate
+ * graph images. */
+ this.rObjectGenerator = (RObjectGenerator) getServletContext().
+ getAttribute("RObjectGenerator");
+ }
+
+ public void doGet(HttpServletRequest request,
+ HttpServletResponse response) throws IOException,
+ ServletException {
+
+ /* Find out which graph type was requested and make sure we know this
+ * graph type and file type. */
+ String requestedGraph = request.getRequestURI();
+ String fileType = null;
+ if (requestedGraph.endsWith(".png") ||
+ requestedGraph.endsWith(".pdf") ||
+ requestedGraph.endsWith(".svg")) {
+ fileType = requestedGraph.substring(requestedGraph.length() - 3);
+ requestedGraph = requestedGraph.substring(0, requestedGraph.length()
+ - 4);
+ }
+ if (requestedGraph.contains("/")) {
+ requestedGraph = requestedGraph.substring(requestedGraph.
+ lastIndexOf("/") + 1);
+ }
+
+ /* Request graph from R object generator, which either returns it from
+ * its cache or asks Rserve to generate it. */
+ RObject graph = rObjectGenerator.generateGraph(requestedGraph,
+ fileType, request.getParameterMap(), true);
+
+ /* Make sure that we have a graph to return. */
+ if (graph == null || graph.getBytes() == null || fileType == null) {
+ response.sendError(HttpServletResponse.SC_INTERNAL_SERVER_ERROR);
+ return;
+ }
+
+ /* Write graph bytes to response. */
+ BufferedOutputStream output = null;
+ response.setContentType("image/" + fileType);
+ response.setHeader("Content-Length",
+ String.valueOf(graph.getBytes().length));
+ response.setHeader("Content-Disposition",
+ "inline; filename=\"" + graph.getFileName() + "\"");
+ output = new BufferedOutputStream(response.getOutputStream(), 1024);
+ output.write(graph.getBytes(), 0, graph.getBytes().length);
+ output.flush();
+ output.close();
+ }
+}
+
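Editor's note: GraphImageServlet derives the content type as "image/" + fileType, which is exact for PNG but only approximate for PDF and SVG. A small sketch of an explicit lookup, assuming the standard MIME types (hypothetical helper, not part of this commit):

    import java.util.HashMap;
    import java.util.Map;

    public class GraphMimeTypes {

      private static final Map<String, String> MIME_TYPES =
          new HashMap<String, String>();
      static {
        MIME_TYPES.put("png", "image/png");
        MIME_TYPES.put("pdf", "application/pdf");
        MIME_TYPES.put("svg", "image/svg+xml");
      }

      /* Return the MIME type for a graph file type, or null if the type
       * is not one of the supported graph formats. */
      static String mimeTypeFor(String fileType) {
        return MIME_TYPES.get(fileType);
      }
    }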
diff --git a/website/src/org/torproject/metrics/web/graphs/GraphParameterChecker.java b/website/src/org/torproject/metrics/web/graphs/GraphParameterChecker.java
new file mode 100644
index 0000000..098d908
--- /dev/null
+++ b/website/src/org/torproject/metrics/web/graphs/GraphParameterChecker.java
@@ -0,0 +1,280 @@
+/* Copyright 2011, 2012 The Tor Project
+ * See LICENSE for licensing information */
+package org.torproject.metrics.web.graphs;
+
+import java.text.ParseException;
+import java.text.SimpleDateFormat;
+import java.util.Arrays;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import java.util.TimeZone;
+import java.util.regex.Pattern;
+
+/**
+ * Checks request parameters passed to graph-generating servlets.
+ */
+public class GraphParameterChecker {
+
+ /**
+ * Singleton instance of this class.
+ */
+ private static GraphParameterChecker instance =
+ new GraphParameterChecker();
+
+ /**
+ * Returns the singleton instance of this class.
+ */
+ public static GraphParameterChecker getInstance() {
+ return instance;
+ }
+
+ /* Date format for parsing start and end dates. */
+ private SimpleDateFormat dateFormat;
+
+ /* Available graphs with corresponding parameter lists. */
+ private Map<String, String> availableGraphs;
+
+ /* Known parameters and parameter values. */
+ private Map<String, String> knownParameterValues;
+
+ /**
+ * Initializes the map of known parameters and their valid values.
+ */
+ public GraphParameterChecker() {
+ this.dateFormat = new SimpleDateFormat("yyyy-MM-dd");
+ this.dateFormat.setTimeZone(TimeZone.getTimeZone("UTC"));
+
+ this.knownParameterValues = new HashMap<String, String>();
+ this.knownParameterValues.put("flag",
+ "Running,Exit,Guard,Fast,Stable,HSDir");
+ StringBuilder sb = new StringBuilder("all");
+ for (String[] country : Countries.getInstance().getCountryList()) {
+ sb.append("," + country[0]);
+ }
+ this.knownParameterValues.put("country", sb.toString());
+ this.knownParameterValues.put("events", "on,off,points");
+ this.knownParameterValues.put("source", "all,siv,moria,torperf");
+ this.knownParameterValues.put("filesize", "50kb,1mb,5mb");
+ this.knownParameterValues.put("transport",
+ "obfs2,obfs3,websocket,<OR>,<??>");
+ this.knownParameterValues.put("version", "v4,v6");
+ }
+
+ public void setAvailableGraphs(Map<String, String> availableGraphs) {
+ this.availableGraphs = availableGraphs;
+ }
+
+ /**
+ * Checks request parameters for the given graph type and returns a map
+ * of recognized parameters, or null if the graph type doesn't exist or
+ * the parameters are invalid.
+ */
+ public Map<String, String[]> checkParameters(String graphType,
+ Map requestParameters) {
+
+ /* Check if the graph type exists. */
+ if (graphType == null ||
+ !this.availableGraphs.containsKey(graphType)) {
+ return null;
+ }
+
+ /* Find out which other parameters are supported by this graph type
+ * and parse them if they are given. */
+ Set<String> supportedGraphParameters = new HashSet<String>(Arrays.
+ asList(this.availableGraphs.get(graphType).split(",")));
+ Map<String, String[]> recognizedGraphParameters =
+ new HashMap<String, String[]>();
+
+ /* Parse start and end dates if supported by the graph type. If no end
+ * date is provided, set it to today. If no start date is provided,
+ * set it to 90 days before the end date. Make sure that start date
+ * precedes end date. */
+ if (supportedGraphParameters.contains("start") ||
+ supportedGraphParameters.contains("end")) {
+ String[] startParameter = (String[]) requestParameters.get("start");
+ String[] endParameter = (String[]) requestParameters.get("end");
+ long endTimestamp = System.currentTimeMillis();
+ if (endParameter != null && endParameter.length > 0 &&
+ endParameter[0].length() > 0) {
+ try {
+ endTimestamp = dateFormat.parse(endParameter[0]).getTime();
+ } catch (ParseException e) {
+ return null;
+ }
+ if (!endParameter[0].startsWith("20")) {
+ return null;
+ }
+ }
+ endParameter = new String[] { dateFormat.format(endTimestamp) };
+ long startTimestamp = endTimestamp - 90L * 24L * 60L * 60L * 1000L;
+ if (startParameter != null && startParameter.length > 0 &&
+ startParameter[0].length() > 0) {
+ try {
+ startTimestamp = dateFormat.parse(startParameter[0]).getTime();
+ } catch (ParseException e) {
+ return null;
+ }
+ if (!startParameter[0].startsWith("20")) {
+ return null;
+ }
+ }
+ startParameter = new String[] { dateFormat.format(startTimestamp) };
+ if (startTimestamp > endTimestamp) {
+ return null;
+ }
+ recognizedGraphParameters.put("start", startParameter);
+ recognizedGraphParameters.put("end", endParameter);
+ }
+
+ /* Parse relay flags if supported by the graph type. If no relay flags
+ * are passed or none of them have been recognized, use the set of all
+ * known flags as default. */
+ if (supportedGraphParameters.contains("flag")) {
+ String[] flagParameters = (String[]) requestParameters.get("flag");
+ List<String> knownFlags = Arrays.asList(
+ this.knownParameterValues.get("flag").split(","));
+ if (flagParameters != null) {
+ for (String flag : flagParameters) {
+ if (flag == null || flag.length() == 0 ||
+ !knownFlags.contains(flag)) {
+ return null;
+ }
+ }
+ } else {
+ flagParameters = "Running,Exit,Guard,Fast,Stable".split(",");
+ }
+ recognizedGraphParameters.put("flag", flagParameters);
+ }
+
+ /* Parse country codes if supported by the graph type. If no countries
+ * are passed, use country code "all" (all countries) as default. */
+ if (supportedGraphParameters.contains("country")) {
+ String[] countryParameters = (String[]) requestParameters.get(
+ "country");
+ List<String> knownCountries = Arrays.asList(
+ this.knownParameterValues.get("country").split(","));
+ if (countryParameters != null) {
+ for (String country : countryParameters) {
+ if (country == null || country.length() == 0 ||
+ !knownCountries.contains(country)) {
+ return null;
+ }
+ }
+ } else {
+ countryParameters = new String[] { "all" };
+ }
+ recognizedGraphParameters.put("country", countryParameters);
+ }
+
+ /* Parse whether the estimated min/max range shall be displayed if
+ * supported by the graph type. This parameter can be "on," "off," or
+ * "points," where "off" is the default. */
+ if (supportedGraphParameters.contains("events")) {
+ String[] eventsParameter = (String[]) requestParameters.get(
+ "events");
+ List<String> knownRanges = Arrays.asList(
+ this.knownParameterValues.get("events").split(","));
+ if (eventsParameter != null) {
+ if (eventsParameter.length != 1 ||
+ eventsParameter[0].length() == 0 ||
+ !knownRanges.contains(eventsParameter[0])) {
+ return null;
+ }
+ } else {
+ eventsParameter = new String[] { "off" };
+ }
+ recognizedGraphParameters.put("events", eventsParameter);
+ }
+
+ /* Parse torperf data source if supported by the graph type. Only a
+ * single source can be passed. If no source is passed, use "all" as
+ * default. */
+ if (supportedGraphParameters.contains("source")) {
+ String[] sourceParameter = (String[]) requestParameters.get(
+ "source");
+ List<String> knownSources = Arrays.asList(
+ this.knownParameterValues.get("source").split(","));
+ if (sourceParameter != null) {
+ if (sourceParameter.length != 1) {
+ return null;
+ }
+ if (sourceParameter[0].length() == 0 ||
+ !knownSources.contains(sourceParameter[0])) {
+ return null;
+ }
+ } else {
+ sourceParameter = new String[] { "all" };
+ }
+ recognizedGraphParameters.put("source", sourceParameter);
+ }
+
+ /* Parse torperf file size if supported by the graph type. Only a
+ * single file size can be passed. If no file size is passed, use
+ * "50kb" as default. */
+ if (supportedGraphParameters.contains("filesize")) {
+ String[] filesizeParameter = (String[]) requestParameters.get(
+ "filesize");
+ List<String> knownFilesizes = Arrays.asList(
+ this.knownParameterValues.get("filesize").split(","));
+ if (filesizeParameter != null) {
+ if (filesizeParameter.length != 1) {
+ return null;
+ }
+ if (filesizeParameter[0].length() == 0 ||
+ !knownFilesizes.contains(filesizeParameter[0])) {
+ return null;
+ }
+ } else {
+ filesizeParameter = new String[] { "50kb" };
+ }
+ recognizedGraphParameters.put("filesize", filesizeParameter);
+ }
+
+ /* Parse transports if supported by the graph type. If no transports
+ * are passed, use "<OR>" as default. */
+ if (supportedGraphParameters.contains("transport")) {
+ String[] transportParameters = (String[]) requestParameters.get(
+ "transport");
+ List<String> knownTransports = Arrays.asList(
+ this.knownParameterValues.get("transport").split(","));
+ if (transportParameters != null) {
+ for (String transport : transportParameters) {
+ if (transport == null || transport.length() == 0 ||
+ !knownTransports.contains(transport)) {
+ return null;
+ }
+ }
+ } else {
+ transportParameters = new String[] { "<OR>" };
+ }
+ recognizedGraphParameters.put("transport", transportParameters);
+ }
+
+ /* Parse versions if supported by the graph type. If no versions
+ * are passed, use "v4" as default. */
+ if (supportedGraphParameters.contains("version")) {
+ String[] versionParameters = (String[]) requestParameters.get(
+ "version");
+ List<String> knownVersions = Arrays.asList(
+ this.knownParameterValues.get("version").split(","));
+ if (versionParameters != null) {
+ for (String version : versionParameters) {
+ if (version == null || version.length() == 0 ||
+ !knownVersions.contains(version)) {
+ return null;
+ }
+ }
+ } else {
+ versionParameters = new String[] { "v4" };
+ }
+ recognizedGraphParameters.put("version", versionParameters);
+ }
+
+ /* We now have a map with all required graph parameters. Return it. */
+ return recognizedGraphParameters;
+ }
+}
+
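Editor's note: the checker both validates and fills in defaults: a missing end date becomes today, a missing start date becomes 90 days earlier, missing flags become the five standard flags, and so on. A standalone sketch of a call, wiring up one graph type the way RObjectGenerator does below (hypothetical example class):

    import java.util.HashMap;
    import java.util.Map;

    import org.torproject.metrics.web.graphs.GraphParameterChecker;

    public class CheckerExample {

      public static void main(String[] args) {
        /* Register the available graphs, as RObjectGenerator does at
         * context initialization. */
        Map<String, String> availableGraphs =
            new HashMap<String, String>();
        availableGraphs.put("relayflags", "start,end,flag,filename");
        GraphParameterChecker.getInstance().setAvailableGraphs(
            availableGraphs);

        /* Simulate a request with only an end date; start and flag are
         * filled in with defaults. */
        Map<String, String[]> requestParameters =
            new HashMap<String, String[]>();
        requestParameters.put("end", new String[] { "2014-01-21" });
        Map<String, String[]> checked = GraphParameterChecker
            .getInstance().checkParameters("relayflags",
            requestParameters);
        System.out.println(checked.get("start")[0]);  /* 2013-10-23 */
        System.out.println(checked.get("flag").length);  /* 5 */
      }
    }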
diff --git a/website/src/org/torproject/metrics/web/graphs/GraphsSubpagesServlet.java b/website/src/org/torproject/metrics/web/graphs/GraphsSubpagesServlet.java
new file mode 100644
index 0000000..35a6a7a
--- /dev/null
+++ b/website/src/org/torproject/metrics/web/graphs/GraphsSubpagesServlet.java
@@ -0,0 +1,162 @@
+/* Copyright 2011, 2012 The Tor Project
+ * See LICENSE for licensing information */
+package org.torproject.metrics.web.graphs;
+
+import java.io.IOException;
+import java.text.SimpleDateFormat;
+import java.util.Arrays;
+import java.util.Date;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import java.util.TimeZone;
+
+import javax.servlet.ServletException;
+import javax.servlet.http.HttpServlet;
+import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpServletResponse;
+
+public class GraphsSubpagesServlet extends HttpServlet {
+
+ private static final long serialVersionUID = -5959829347747628403L;
+
+ /* Available graphs subpages with corresponding JSP to which requests
+ * are forwarded. */
+ private Map<String, String> availableGraphsSubpages;
+
+ /* Available tables on graphs subpages. */
+ private Map<String, Set<String>> availableGraphsSubpageTables;
+
+ /* Country codes and names for per-country graphs. */
+ private List<String[]> knownCountries;
+
+ /* R object generator for generating table data. */
+ private RObjectGenerator rObjectGenerator;
+
+ public GraphsSubpagesServlet() {
+ this.availableGraphsSubpages = new HashMap<String, String>();
+ this.availableGraphsSubpages.put("network.html",
+ "WEB-INF/network.jsp");
+ this.availableGraphsSubpages.put("fast-exits.html",
+ "WEB-INF/fast-exits.jsp");
+ this.availableGraphsSubpages.put("users.html", "WEB-INF/users.jsp");
+ this.availableGraphsSubpages.put("performance.html",
+ "WEB-INF/performance.jsp");
+
+ this.availableGraphsSubpageTables =
+ new HashMap<String, Set<String>>();
+ this.availableGraphsSubpageTables.put("users.html",
+ new HashSet<String>(Arrays.asList((
+ "direct-users,censorship-events,bridge-users,userstats-relay,"
+ + "userstats-censorship-events,userstats-bridge").split(","))));
+
+ this.knownCountries = Countries.getInstance().getCountryList();
+ }
+
+ public void init() {
+ /* Get a reference to the R object generator that we need to generate
+ * table data. */
+ this.rObjectGenerator = (RObjectGenerator) getServletContext().
+ getAttribute("RObjectGenerator");
+ }
+
+ public void doGet(HttpServletRequest request,
+ HttpServletResponse response) throws IOException, ServletException {
+
+ /* Find out which graph subpage was requested and look up which JSP
+ * handles this subpage. */
+ String requestedPage = request.getRequestURI();
+ if (requestedPage == null) {
+ response.sendError(HttpServletResponse.SC_BAD_REQUEST);
+ return;
+ }
+ if (requestedPage.contains("/")) {
+ requestedPage = requestedPage.substring(requestedPage.
+ lastIndexOf("/") + 1);
+ }
+ if (!availableGraphsSubpages.containsKey(requestedPage)) {
+ response.sendError(HttpServletResponse.SC_INTERNAL_SERVER_ERROR);
+ return;
+ }
+ String jsp = availableGraphsSubpages.get(requestedPage);
+
+ /* Find out which graph or table type was requested, if any. */
+ String requestedGraph = request.getParameter("graph");
+ String requestedTable = request.getParameter("table");
+ if (requestedGraph != null) {
+
+ /* Check if the passed parameters are valid. */
+ Map<String, String[]> checkedParameters = GraphParameterChecker.
+ getInstance().checkParameters(requestedGraph,
+ request.getParameterMap());
+ if (checkedParameters != null) {
+
+ /* Set the graph's attributes to the appropriate values, so that
+ * we can display the correct graph and prepopulate the form. */
+ StringBuilder urlBuilder = new StringBuilder();
+ for (Map.Entry<String, String[]> param :
+ checkedParameters.entrySet()) {
+ request.setAttribute(requestedGraph.replaceAll("-", "_") + "_"
+ + param.getKey(), param.getValue());
+ for (String paramValue : param.getValue()) {
+ urlBuilder.append("&" + param.getKey() + "=" + paramValue);
+ }
+ }
+ String url = "?" + urlBuilder.toString().substring(1);
+ request.setAttribute(requestedGraph.replaceAll("-", "_") + "_url",
+ url);
+ }
+ }
+ if (requestedTable != null) {
+
+ /* Check if the passed parameters are valid. */
+ Map<String, String[]> checkedParameters = TableParameterChecker.
+ getInstance().checkParameters(requestedTable,
+ request.getParameterMap());
+ if (checkedParameters != null) {
+
+ /* Set the table's attributes to the appropriate values, so that
+ * we can prepopulate the form. */
+ for (Map.Entry<String, String[]> param :
+ checkedParameters.entrySet()) {
+ request.setAttribute(requestedTable.replaceAll("-", "_") + "_"
+ + param.getKey(), param.getValue());
+ }
+ }
+ }
+
+ /* Generate table data if the graphs subpage has any tables,
+ * regardless of whether a table update was requested, and add the
+ * table data as request attribute. */
+ if (this.availableGraphsSubpageTables.containsKey(requestedPage)) {
+ for (String tableName :
+ this.availableGraphsSubpageTables.get(requestedPage)) {
+ List<Map<String, String>> tableData = rObjectGenerator.
+ generateTable(tableName, requestedTable,
+ request.getParameterMap(), true);
+ request.setAttribute(tableName.replaceAll("-", "_")
+ + "_tabledata", tableData);
+ }
+ }
+
+ /* Pass list of known countries in case we want to display them. */
+ request.setAttribute("countries", this.knownCountries);
+
+ /* Pass the default start and end dates. */
+ SimpleDateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd");
+ dateFormat.setTimeZone(TimeZone.getTimeZone("UTC"));
+ Date defaultEndDate = new Date();
+ Date defaultStartDate = new Date(defaultEndDate.getTime()
+ - 90L * 24L * 60L * 60L * 1000L);
+ request.setAttribute("default_start_date",
+ dateFormat.format(defaultStartDate));
+ request.setAttribute("default_end_date",
+ dateFormat.format(defaultEndDate));
+
+ /* Forward the request to the JSP that does all the hard work. */
+ request.getRequestDispatcher(jsp).forward(request, response);
+ }
+}
+
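Editor's note: the prepopulation URL assembled above concatenates raw parameter values; a value such as the "<OR>" transport contains characters that are not URL-safe. A sketch of the same loop with the values encoded (hypothetical helper, assuming UTF-8):

    import java.io.UnsupportedEncodingException;
    import java.net.URLEncoder;
    import java.util.Map;

    public class GraphUrlBuilder {

      /* Build a "?key=value&..." query string from checked parameters,
       * URL-encoding each value. */
      static String buildUrl(Map<String, String[]> checkedParameters)
          throws UnsupportedEncodingException {
        StringBuilder urlBuilder = new StringBuilder();
        for (Map.Entry<String, String[]> param
            : checkedParameters.entrySet()) {
          for (String paramValue : param.getValue()) {
            urlBuilder.append("&" + param.getKey() + "="
                + URLEncoder.encode(paramValue, "UTF-8"));
          }
        }
        if (urlBuilder.length() == 0) {
          return "?";
        }
        return "?" + urlBuilder.substring(1);
      }
    }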
diff --git a/website/src/org/torproject/metrics/web/graphs/RObject.java b/website/src/org/torproject/metrics/web/graphs/RObject.java
new file mode 100644
index 0000000..db8f362
--- /dev/null
+++ b/website/src/org/torproject/metrics/web/graphs/RObject.java
@@ -0,0 +1,23 @@
+/* Copyright 2011, 2012 The Tor Project
+ * See LICENSE for licensing information */
+package org.torproject.metrics.web.graphs;
+
+public class RObject {
+ private byte[] bytes;
+ private String fileName;
+ private long lastModified;
+ public RObject(byte[] bytes, String fileName, long lastModified) {
+ this.bytes = bytes;
+ this.fileName = fileName;
+ this.lastModified = lastModified;
+ }
+ public String getFileName() {
+ return this.fileName;
+ }
+ public byte[] getBytes() {
+ return this.bytes;
+ }
+ public long getLastModified() {
+ return this.lastModified;
+ }
+}
diff --git a/website/src/org/torproject/metrics/web/graphs/RObjectGenerator.java b/website/src/org/torproject/metrics/web/graphs/RObjectGenerator.java
new file mode 100644
index 0000000..8b64ff7
--- /dev/null
+++ b/website/src/org/torproject/metrics/web/graphs/RObjectGenerator.java
@@ -0,0 +1,398 @@
+/* Copyright 2011, 2012 The Tor Project
+ * See LICENSE for licensing information */
+package org.torproject.metrics.web.graphs;
+
+import java.io.BufferedInputStream;
+import java.io.BufferedReader;
+import java.io.ByteArrayInputStream;
+import java.io.ByteArrayOutputStream;
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.IOException;
+import java.io.InputStreamReader;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import java.util.SortedSet;
+import java.util.TreeSet;
+
+import javax.servlet.ServletContext;
+import javax.servlet.ServletContextEvent;
+import javax.servlet.ServletContextListener;
+
+import org.rosuda.REngine.Rserve.RConnection;
+import org.rosuda.REngine.Rserve.RserveException;
+
+public class RObjectGenerator implements ServletContextListener {
+
+ /* Host and port where Rserve is listening. */
+ private String rserveHost;
+ private int rservePort;
+
+ /* Some parameters for our cache of graph images. */
+ private String cachedGraphsDirectory;
+ private long maxCacheAge;
+
+ private SortedSet<String> availableCsvFiles;
+ private Map<String, String> availableTables;
+ private Map<String, String> availableGraphs;
+ private Set<String> availableGraphFileTypes;
+
+ public void contextInitialized(ServletContextEvent event) {
+
+ /* Initialize using context parameters. */
+ ServletContext servletContext = event.getServletContext();
+ this.rserveHost = servletContext.getInitParameter("rserveHost");
+ this.rservePort = Integer.parseInt(servletContext.getInitParameter(
+ "rservePort"));
+ this.maxCacheAge = Long.parseLong(servletContext.getInitParameter(
+ "maxCacheAge"));
+ this.cachedGraphsDirectory = servletContext.getInitParameter(
+ "cachedGraphsDir");
+
+ /* Initialize map of available CSV files. */
+ this.availableCsvFiles = new TreeSet<String>();
+ this.availableCsvFiles.add("bandwidth");
+ this.availableCsvFiles.add("bandwidth-flags");
+ this.availableCsvFiles.add("bwhist-flags");
+ this.availableCsvFiles.add("connbidirect");
+ this.availableCsvFiles.add("cloudbridges");
+ this.availableCsvFiles.add("dirbytes");
+ this.availableCsvFiles.add("monthly-userstats-average");
+ this.availableCsvFiles.add("monthly-userstats-peak");
+ this.availableCsvFiles.add("networksize");
+ this.availableCsvFiles.add("platforms");
+ this.availableCsvFiles.add("relaycountries");
+ this.availableCsvFiles.add("relayflags");
+ this.availableCsvFiles.add("torperf");
+ this.availableCsvFiles.add("torperf-failures");
+ this.availableCsvFiles.add("userstats");
+ this.availableCsvFiles.add("userstats-detector");
+ this.availableCsvFiles.add("versions");
+
+ this.availableTables = new HashMap<String, String>();
+ this.availableTables.put("userstats-relay", "start,end,filename");
+ this.availableTables.put("userstats-bridge", "start,end,filename");
+ this.availableTables.put("userstats-censorship-events",
+ "start,end,filename");
+ TableParameterChecker.getInstance().setAvailableTables(
+ availableTables);
+
+ this.availableGraphs = new HashMap<String, String>();
+ this.availableGraphs.put("networksize", "start,end,filename");
+ this.availableGraphs.put("cloudbridges", "start,end,filename");
+ this.availableGraphs.put("relaycountries",
+ "start,end,country,filename");
+ this.availableGraphs.put("relayflags", "start,end,flag,filename");
+ this.availableGraphs.put("versions", "start,end,filename");
+ this.availableGraphs.put("platforms", "start,end,filename");
+ this.availableGraphs.put("bandwidth", "start,end,filename");
+ this.availableGraphs.put("bandwidth-flags", "start,end,filename");
+ this.availableGraphs.put("bwhist-flags", "start,end,filename");
+ this.availableGraphs.put("dirbytes", "start,end,filename");
+ this.availableGraphs.put("torperf",
+ "start,end,source,filesize,filename");
+ this.availableGraphs.put("torperf-failures",
+ "start,end,source,filesize,filename");
+ this.availableGraphs.put("connbidirect", "start,end,filename");
+ this.availableGraphs.put("fast-exits", "start,end,filename");
+ this.availableGraphs.put("almost-fast-exits", "start,end,filename");
+ this.availableGraphs.put("userstats-relay-country",
+ "start,end,country,events,filename");
+ this.availableGraphs.put("userstats-bridge-country",
+ "start,end,country,filename");
+ this.availableGraphs.put("userstats-bridge-transport",
+ "start,end,transport,filename");
+ this.availableGraphs.put("userstats-bridge-version",
+ "start,end,version,filename");
+ this.availableGraphFileTypes = new HashSet<String>(Arrays.asList(
+ "png,pdf,svg".split(",")));
+ GraphParameterChecker.getInstance().setAvailableGraphs(
+ availableGraphs);
+
+ /* Register ourself, so that servlets can use us. */
+ servletContext.setAttribute("RObjectGenerator", this);
+
+ /* Periodically generate R objects with default parameters. */
+ new Thread() {
+ public void run() {
+ long lastUpdated = 0L, sleep;
+ while (true) {
+ while ((sleep = maxCacheAge * 1000L / 2L + lastUpdated
+ - System.currentTimeMillis()) > 0L) {
+ try {
+ Thread.sleep(sleep);
+ } catch (InterruptedException e) {
+ }
+ }
+ for (String csvFile : availableCsvFiles) {
+ generateCsv(csvFile, false);
+ }
+ for (String tableName : availableTables.keySet()) {
+ generateTable(tableName, tableName, new HashMap(), false);
+ }
+ for (String graphName : availableGraphs.keySet()) {
+ for (String fileType : availableGraphFileTypes) {
+ generateGraph(graphName, fileType, new HashMap(), false);
+ }
+ }
+ lastUpdated = System.currentTimeMillis();
+ }
+ };
+ }.start();
+ }
+
+ public void contextDestroyed(ServletContextEvent event) {
+ /* Nothing to do. */
+ }
+
+ public RObject generateGraph(String requestedGraph, String fileType,
+ Map parameterMap, boolean checkCache) {
+ Map<String, String[]> checkedParameters = GraphParameterChecker.
+ getInstance().checkParameters(requestedGraph, parameterMap);
+ if (checkedParameters == null) {
+ /* TODO We're going to take the blame by sending an internal server
+ * error to the client, but really the user is to blame. */
+ return null;
+ }
+ StringBuilder rQueryBuilder = new StringBuilder("plot_"
+ + requestedGraph.replaceAll("-", "_") + "("),
+ imageFilenameBuilder = new StringBuilder(requestedGraph);
+ for (Map.Entry<String, String[]> parameter :
+ checkedParameters.entrySet()) {
+ String parameterName = parameter.getKey();
+ String[] parameterValues = parameter.getValue();
+ for (String param : parameterValues) {
+ imageFilenameBuilder.append("-" + param);
+ }
+ if (parameterValues.length < 2) {
+ rQueryBuilder.append(parameterName + " = '" + parameterValues[0]
+ + "', ");
+ } else {
+ rQueryBuilder.append(parameterName + " = c(");
+ for (int i = 0; i < parameterValues.length - 1; i++) {
+ rQueryBuilder.append("'" + parameterValues[i] + "', ");
+ }
+ rQueryBuilder.append("'" + parameterValues[
+ parameterValues.length - 1] + "'), ");
+ }
+ }
+ imageFilenameBuilder.append("." + fileType);
+ String imageFilename = imageFilenameBuilder.toString();
+ rQueryBuilder.append("path = '%s')");
+ String rQuery = rQueryBuilder.toString();
+ File imageFile = new File(this.cachedGraphsDirectory + "/"
+ + imageFilename);
+ return this.generateRObject(rQuery, imageFile, imageFilename,
+ checkCache);
+ }
+
+ public SortedSet<String> getAvailableCsvFiles() {
+ return this.availableCsvFiles;
+ }
+
+ public RObject generateCsv(String requestedCsvFile,
+ boolean checkCache) {
+ /* Prepare filename and R query string. */
+ String rQuery = "export_" + requestedCsvFile.replaceAll("-", "_")
+ + "(path = '%s')";
+ String csvFilename = requestedCsvFile + ".csv";
+
+ /* See if we need to generate this .csv file. */
+ File csvFile = new File(this.cachedGraphsDirectory + "/"
+ + csvFilename);
+ return this.generateRObject(rQuery, csvFile, csvFilename, checkCache);
+ }
+
+ public List<Map<String, String>> generateTable(String tableName,
+ String requestedTable, Map parameterMap, boolean checkCache) {
+
+ Map<String, String[]> checkedParameters = null;
+ if (tableName.equals(requestedTable)) {
+ checkedParameters = TableParameterChecker.
+ getInstance().checkParameters(requestedTable,
+ parameterMap);
+ } else {
+ checkedParameters = TableParameterChecker.
+ getInstance().checkParameters(tableName, null);
+ }
+ if (checkedParameters == null) {
+ /* TODO We're going to take the blame by sending an internal server
+ * error to the client, but really the user is to blame. */
+ return null;
+ }
+ StringBuilder rQueryBuilder = new StringBuilder("write_"
+ + tableName.replaceAll("-", "_") + "("),
+ tableFilenameBuilder = new StringBuilder(tableName);
+
+ for (Map.Entry<String, String[]> parameter :
+ checkedParameters.entrySet()) {
+ String parameterName = parameter.getKey();
+ String[] parameterValues = parameter.getValue();
+ for (String param : parameterValues) {
+ tableFilenameBuilder.append("-" + param);
+ }
+ if (parameterValues.length < 2) {
+ rQueryBuilder.append(parameterName + " = '"
+ + parameterValues[0] + "', ");
+ } else {
+ rQueryBuilder.append(parameterName + " = c(");
+ for (int i = 0; i < parameterValues.length - 1; i++) {
+ rQueryBuilder.append("'" + parameterValues[i] + "', ");
+ }
+ rQueryBuilder.append("'" + parameterValues[
+ parameterValues.length - 1] + "'), ");
+ }
+ }
+ tableFilenameBuilder.append(".tbl");
+ String tableFilename = tableFilenameBuilder.toString();
+ rQueryBuilder.append("path = '%s')");
+ String rQuery = rQueryBuilder.toString();
+ return this.generateTable(rQuery, tableFilename, checkCache);
+ }
+
+ /* Generate table data using the given R query and filename or read
+ * previously generated table data from disk if it's not too old and
+ * return table data. */
+ private List<Map<String, String>> generateTable(String rQuery,
+ String tableFilename, boolean checkCache) {
+
+ /* See if we need to generate this table. */
+ File tableFile = new File(this.cachedGraphsDirectory + "/"
+ + tableFilename);
+ byte[] tableBytes = this.generateRObject(rQuery, tableFile,
+ tableFilename, checkCache).getBytes();
+
+ /* Write the table content to a map. */
+ List<Map<String, String>> result = null;
+ try {
+ result = new ArrayList<Map<String, String>>();
+ BufferedReader br = new BufferedReader(new InputStreamReader(
+ new ByteArrayInputStream(tableBytes)));
+ String line = br.readLine();
+ if (line != null) {
+ List<String> headers = new ArrayList<String>(Arrays.asList(
+ line.split(",")));
+ while ((line = br.readLine()) != null) {
+ String[] parts = line.split(",");
+ if (headers.size() != parts.length) {
+ return null;
+ }
+ Map<String, String> row = new HashMap<String, String>();
+ for (int i = 0; i < headers.size(); i++) {
+ row.put(headers.get(i), parts[i]);
+ }
+ result.add(row);
+ }
+ }
+ } catch (IOException e) {
+ return null;
+ }
+
+ /* Return table values. */
+ return result;
+ }
+
+ /* Generate an R object in a separate worker thread, or wait for an
+ * already running worker thread to finish and get its result. */
+ private RObject generateRObject(String rQuery, File rObjectFile,
+ String fileName, boolean checkCache) {
+ RObjectGeneratorWorker worker = null;
+ synchronized (this.rObjectGeneratorThreads) {
+ if (this.rObjectGeneratorThreads.containsKey(rQuery)) {
+ worker = this.rObjectGeneratorThreads.get(rQuery);
+ } else {
+ worker = new RObjectGeneratorWorker(rQuery, rObjectFile,
+ fileName, checkCache);
+ this.rObjectGeneratorThreads.put(rQuery, worker);
+ worker.start();
+ }
+ }
+ try {
+ worker.join();
+ } catch (InterruptedException e) {
+ }
+ synchronized (this.rObjectGeneratorThreads) {
+ if (this.rObjectGeneratorThreads.containsKey(rQuery) &&
+ this.rObjectGeneratorThreads.get(rQuery) == worker) {
+ this.rObjectGeneratorThreads.remove(rQuery);
+ }
+ }
+ return worker.getRObject();
+ }
+
+ private Map<String, RObjectGeneratorWorker> rObjectGeneratorThreads =
+ new HashMap<String, RObjectGeneratorWorker>();
+
+ private class RObjectGeneratorWorker extends Thread {
+
+ private String rQuery;
+ private File rObjectFile;
+ private String fileName;
+ private boolean checkCache;
+ private RObject result = null;
+
+ public RObjectGeneratorWorker(String rQuery, File rObjectFile,
+ String fileName, boolean checkCache) {
+ this.rQuery = rQuery;
+ this.rObjectFile = rObjectFile;
+ this.fileName = fileName;
+ this.checkCache = checkCache;
+ }
+
+ public void run() {
+
+ /* See if we need to generate this R object. */
+ long now = System.currentTimeMillis();
+ if (!this.checkCache || !this.rObjectFile.exists() ||
+ this.rObjectFile.lastModified() < now - maxCacheAge * 1000L) {
+
+ /* We do. Update the R query to contain the absolute path to the
+ * file to be generated, create a connection to Rserve, run the R
+ * query, and close the connection. The generated object will be
+ * on disk. */
+ this.rQuery = String.format(this.rQuery,
+ this.rObjectFile.getAbsolutePath());
+ try {
+ RConnection rc = new RConnection(rserveHost, rservePort);
+ rc.eval(this.rQuery);
+ rc.close();
+ } catch (RserveException e) {
+ return;
+ }
+
+ /* Check that we really just generated the R object. */
+ if (!this.rObjectFile.exists() || this.rObjectFile.lastModified()
+ < now - maxCacheAge * 1000L) {
+ return;
+ }
+ }
+
+ /* Read the R object from disk and write it to a byte array. */
+ long lastModified = this.rObjectFile.lastModified();
+ try {
+ BufferedInputStream bis = new BufferedInputStream(
+ new FileInputStream(this.rObjectFile), 1024);
+ ByteArrayOutputStream baos = new ByteArrayOutputStream();
+ byte[] buffer = new byte[1024];
+ int length;
+ while ((length = bis.read(buffer)) > 0) {
+ baos.write(buffer, 0, length);
+ }
+ bis.close();
+ this.result = new RObject(baos.toByteArray(), this.fileName,
+ lastModified);
+ } catch (IOException e) {
+ return;
+ }
+ }
+
+ public RObject getRObject() {
+ return this.result;
+ }
+ }
+}
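Editor's note: RObjectGenerator deduplicates concurrent work: the map of worker threads, guarded by synchronized blocks and keyed by the R query, makes identical requests that arrive at the same time share one worker and join on its result. The same pattern in isolation, as a minimal generic sketch (hypothetical class, not part of this commit):

    import java.util.HashMap;
    import java.util.Map;
    import java.util.concurrent.Callable;

    /* Runs at most one worker per key at a time; concurrent callers with
     * the same key wait for and share that worker's result. */
    public class DeduplicatingRunner<K, V> {

      private final Map<K, Worker> workers = new HashMap<K, Worker>();

      private class Worker extends Thread {
        private final Callable<V> task;
        private V result;
        Worker(Callable<V> task) {
          this.task = task;
        }
        public void run() {
          try {
            this.result = this.task.call();
          } catch (Exception e) {
            /* Leave result null, as RObjectGenerator does on failure. */
          }
        }
      }

      public V run(K key, Callable<V> task) throws InterruptedException {
        Worker worker;
        synchronized (this.workers) {
          worker = this.workers.get(key);
          if (worker == null) {
            worker = new Worker(task);
            this.workers.put(key, worker);
            worker.start();
          }
        }
        worker.join();
        synchronized (this.workers) {
          if (this.workers.get(key) == worker) {
            this.workers.remove(key);
          }
        }
        return worker.result;
      }
    }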
diff --git a/website/src/org/torproject/metrics/web/graphs/TableParameterChecker.java b/website/src/org/torproject/metrics/web/graphs/TableParameterChecker.java
new file mode 100644
index 0000000..0be736e
--- /dev/null
+++ b/website/src/org/torproject/metrics/web/graphs/TableParameterChecker.java
@@ -0,0 +1,120 @@
+/* Copyright 2011, 2012 The Tor Project
+ * See LICENSE for licensing information */
+package org.torproject.metrics.web.graphs;
+
+import java.text.ParseException;
+import java.text.SimpleDateFormat;
+import java.util.Arrays;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.Map;
+import java.util.Set;
+import java.util.TimeZone;
+
+/**
+ * Checks request parameters passed to generate tables.
+ */
+public class TableParameterChecker {
+
+ /**
+ * Singleton instance of this class.
+ */
+ private static TableParameterChecker instance =
+ new TableParameterChecker();
+
+ /**
+ * Returns the singleton instance of this class.
+ */
+ public static TableParameterChecker getInstance() {
+ return instance;
+ }
+
+ /* Date format for parsing start and end dates. */
+ private SimpleDateFormat dateFormat;
+
+ /* Available tables with corresponding parameter lists. */
+ private Map<String, String> availableTables;
+
+ /**
+ * Initializes the date format used for parsing start and end dates.
+ */
+ public TableParameterChecker() {
+ this.dateFormat = new SimpleDateFormat("yyyy-MM-dd");
+ this.dateFormat.setTimeZone(TimeZone.getTimeZone("UTC"));
+ }
+
+ public void setAvailableTables(Map<String, String> availableTables) {
+ this.availableTables = availableTables;
+ }
+
+ /**
+ * Checks request parameters for the given table type and returns a map
+ * of recognized parameters, or null if the table type doesn't exist or
+ * the parameters are invalid.
+ */
+ public Map<String, String[]> checkParameters(String tableType,
+ Map requestParameters) {
+
+ /* Check if the table type exists. */
+ if (tableType == null ||
+ !this.availableTables.containsKey(tableType)) {
+ return null;
+ }
+
+ /* Find out which other parameters are supported by this table type
+ * and parse them if they are given. */
+ Set<String> supportedTableParameters = new HashSet<String>(Arrays.
+ asList(this.availableTables.get(tableType).split(",")));
+ Map<String, String[]> recognizedTableParameters =
+ new HashMap<String, String[]>();
+
+ /* Parse start and end dates if supported by the table type. If no end
+ * date is provided, set it to today. If no start date is provided,
+ * set it to 90 days before the end date. Make sure that start date
+ * precedes end date. */
+ if (supportedTableParameters.contains("start") ||
+ supportedTableParameters.contains("end")) {
+ String[] startParameter = null;
+ String[] endParameter = null;
+ if (requestParameters != null) {
+ startParameter = (String[]) requestParameters.get("start");
+ endParameter = (String[]) requestParameters.get("end");
+ }
+ long endTimestamp = System.currentTimeMillis();
+ if (endParameter != null && endParameter.length > 0 &&
+ endParameter[0].length() > 0) {
+ try {
+ endTimestamp = dateFormat.parse(endParameter[0]).getTime();
+ } catch (ParseException e) {
+ return null;
+ }
+ if (!endParameter[0].startsWith("20")) {
+ return null;
+ }
+ }
+ endParameter = new String[] { dateFormat.format(endTimestamp) };
+ long startTimestamp = endTimestamp - 90L * 24L * 60L * 60L * 1000L;
+ if (startParameter != null && startParameter.length > 0 &&
+ startParameter[0].length() > 0) {
+ try {
+ startTimestamp = dateFormat.parse(startParameter[0]).getTime();
+ } catch (ParseException e) {
+ return null;
+ }
+ if (!startParameter[0].startsWith("20")) {
+ return null;
+ }
+ }
+ startParameter = new String[] { dateFormat.format(startTimestamp) };
+ if (startTimestamp > endTimestamp) {
+ return null;
+ }
+ recognizedTableParameters.put("start", startParameter);
+ recognizedTableParameters.put("end", endParameter);
+ }
+
+ /* We now have a map with all required table parameters. Return it. */
+ return recognizedTableParameters;
+ }
+}
+
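Editor's note: when RObjectGenerator pre-generates tables it passes null request parameters, so this checker falls back to the default 90-day window ending today. A standalone sketch of that call (hypothetical example class; the table registration mirrors RObjectGenerator above):

    import java.util.HashMap;
    import java.util.Map;

    import org.torproject.metrics.web.graphs.TableParameterChecker;

    public class TableCheckerExample {

      public static void main(String[] args) {
        Map<String, String> availableTables =
            new HashMap<String, String>();
        availableTables.put("userstats-relay", "start,end,filename");
        TableParameterChecker.getInstance().setAvailableTables(
            availableTables);

        /* Null request parameters: start and end default to the last
         * 90 days. */
        Map<String, String[]> checked = TableParameterChecker
            .getInstance().checkParameters("userstats-relay", null);
        System.out.println(checked.get("start")[0] + " to "
            + checked.get("end")[0]);
      }
    }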
diff --git a/website/src/org/torproject/metrics/web/research/ResearchDataServlet.java b/website/src/org/torproject/metrics/web/research/ResearchDataServlet.java
new file mode 100644
index 0000000..8e3788d
--- /dev/null
+++ b/website/src/org/torproject/metrics/web/research/ResearchDataServlet.java
@@ -0,0 +1,260 @@
+/* Copyright 2011, 2012 The Tor Project
+ * See LICENSE for licensing information */
+package org.torproject.metrics.web.research;
+
+import java.io.File;
+import java.io.IOException;
+import java.text.ParseException;
+import java.text.SimpleDateFormat;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Date;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.SortedMap;
+import java.util.TreeMap;
+
+import javax.servlet.ServletException;
+import javax.servlet.http.HttpServlet;
+import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpServletResponse;
+
+/**
+ * Controller servlet for the Data page. Prepares the various lists of
+ * downloadable metrics data files by parsing a file with URLs on other
+ * servers and looking at a local directory with files served by the local
+ * Apache HTTP server. The file with URLs on other servers may contain
+ * comment lines starting with #. Recognizes metrics data file types from
+ * the file names.
+ */
+public class ResearchDataServlet extends HttpServlet {
+
+ private static final long serialVersionUID = -5168280373350515577L;
+
+ public void doGet(HttpServletRequest request,
+ HttpServletResponse response) throws IOException, ServletException {
+
+ /* Read local directory with files served by the local Apache HTTP
+ * server and add the URLs to the list. */
+ List<String> dataFileUrls = new ArrayList<String>();
+ String localDataDir = getServletConfig().getInitParameter(
+ "localDataDir");
+ if (localDataDir != null) {
+ try {
+ File localDataDirFile = new File(localDataDir);
+ if (localDataDirFile.exists() && localDataDirFile.isDirectory()) {
+ for (File localDataFile : localDataDirFile.listFiles()) {
+ if (!localDataFile.isDirectory()) {
+ dataFileUrls.add("/data/" + localDataFile.getName());
+ }
+ }
+ }
+ } catch (SecurityException e) {
+ /* We're not permitted to read the directory with metrics data
+ * files. Ignore. */
+ }
+ }
+
+ /* Prepare data structures that we're going to pass to the JSP. All
+ * data structures are (nested) maps with the map keys being used for
+ * displaying the files in tables and map values being 2-element
+ * arrays containing the file url and optional signature file. */
+ SortedMap<Date, Map<String, String[]>> relayDescriptors =
+ new TreeMap<Date, Map<String, String[]>>(
+ java.util.Collections.reverseOrder());
+ String[] certs = new String[2];
+ SortedMap<Date, String[]> bridgeDescriptors =
+ new TreeMap<Date, String[]>(java.util.Collections.reverseOrder());
+ String[] relayStatistics = new String[2];
+ SortedMap<Date, String[]> torperfTarballs =
+ new TreeMap<Date, String[]>(java.util.Collections.reverseOrder());
+ SortedMap<String, Map<String, String[]>> torperfData =
+ new TreeMap<String, Map<String, String[]>>();
+ SortedMap<Date, String[]> exitLists =
+ new TreeMap<Date, String[]>(java.util.Collections.reverseOrder());
+ SortedMap<Date, String[]> torperfExperiments =
+ new TreeMap<Date, String[]>();
+ SortedMap<Date, String[]> bridgePoolAssignments =
+ new TreeMap<Date, String[]>(java.util.Collections.reverseOrder());
+
+ /* Prepare rewriting Torperf sources. */
+ Map<String, String> torperfSources = new HashMap<String, String>();
+ torperfSources.put("torperffast", "torperf, fastest");
+ torperfSources.put("torperffastratio", "torperf, best ratio");
+ torperfSources.put("torperfslow", "torperf, slowest");
+ torperfSources.put("torperfslowratio", "torperf, worst ratio");
+
+ /* Go through the file list, decide for each file what metrics data
+ * type it is, and put it in the appropriate map. */
+ SimpleDateFormat monthFormat = new SimpleDateFormat("yyyy-MM");
+ SimpleDateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd");
+ List<String> torperfFilesizes = Arrays.asList("50kb,1mb,5mb".
+ split(","));
+ for (String url : dataFileUrls) {
+ if (!url.contains("/")) {
+ continue;
+ }
+ String filename = url.substring(url.lastIndexOf("/") + 1);
+
+ /* URL contains relay descriptors. */
+ if (filename.startsWith("tor-20") ||
+ filename.startsWith("statuses-20") ||
+ filename.startsWith("server-descriptors-20") ||
+ filename.startsWith("extra-infos-20") ||
+ filename.startsWith("votes-20") ||
+ filename.startsWith("consensuses-20")) {
+ String type = filename.substring(0, filename.indexOf("-20"));
+ String yearMonth = filename.substring(filename.indexOf("20"));
+ yearMonth = yearMonth.substring(0, 7);
+ Date month = null;
+ try {
+ month = monthFormat.parse(yearMonth);
+ } catch (ParseException e) {
+ /* Ignore this URL. */
+ continue;
+ }
+ int index = filename.endsWith(".asc") ? 1 : 0;
+ if (!relayDescriptors.containsKey(month)) {
+ relayDescriptors.put(month, new HashMap<String, String[]>());
+ }
+ if (!relayDescriptors.get(month).containsKey(type)) {
+ relayDescriptors.get(month).put(type, new String[2]);
+ }
+ relayDescriptors.get(month).get(type)[index] = url;
+
+ /* URL contains v3 certificates. */
+ } else if (filename.startsWith("certs.tar")) {
+ int index = filename.endsWith(".asc") ? 1 : 0;
+ certs[index] = url;
+
+ /* URL contains bridge descriptors. */
+ } else if (filename.startsWith("bridge-descriptors-20")) {
+ String yearMonth = filename.substring(filename.indexOf("20"));
+ yearMonth = yearMonth.substring(0, 7);
+ Date month = null;
+ try {
+ month = monthFormat.parse(yearMonth);
+ } catch (ParseException e) {
+ /* Ignore this URL. */
+ continue;
+ }
+ int index = filename.endsWith(".asc") ? 1 : 0;
+ if (!bridgeDescriptors.containsKey(month)) {
+ bridgeDescriptors.put(month, new String[2]);
+ }
+ bridgeDescriptors.get(month)[index] = url;
+
+ /* URL contains relay statistics. */
+ } else if (filename.startsWith("relay-statistics.tar.bz2")) {
+ int index = filename.endsWith(".asc") ? 1 : 0;
+ relayStatistics[index] = url;
+
+ /* URL contains Torperf tarball. */
+ } else if (filename.startsWith("torperf-20")) {
+ String yearMonth = filename.substring(filename.indexOf("20"));
+ yearMonth = yearMonth.substring(0, 7);
+ Date month = null;
+ try {
+ month = monthFormat.parse(yearMonth);
+ } catch (ParseException e) {
+ /* Ignore this URL. */
+ continue;
+ }
+ if (!torperfTarballs.containsKey(month)) {
+ torperfTarballs.put(month, new String[2]);
+ }
+ torperfTarballs.get(month)[0] = url;
+
+ /* URL contains Torperf data file. */
+ } else if (filename.endsWith("b.data") ||
+ filename.endsWith("b.extradata")) {
+ boolean isExtraData = filename.endsWith("b.extradata");
+ String[] parts = filename.split("-");
+ if (parts.length != 2) {
+ continue;
+ }
+ String source = parts[0];
+ if (torperfSources.containsKey(source)) {
+ source = torperfSources.get(source);
+ }
+ String filesize = parts[1];
+ filesize = filesize.substring(0, filesize.length()
+ - (isExtraData ? 10 : 5));
+ if (!torperfFilesizes.contains(filesize)) {
+ continue;
+ }
+ if (!torperfData.containsKey(source)) {
+ torperfData.put(source, new HashMap<String, String[]>());
+ }
+ if (!torperfData.get(source).containsKey(filesize)) {
+ torperfData.get(source).put(filesize, new String[2]);
+ }
+ torperfData.get(source).get(filesize)[isExtraData ? 1 : 0] = url;
+
+ /* URL contains Torperf experiment tarball. */
+ } else if (filename.startsWith("torperf-experiment-20")) {
+ String dateString = filename.substring(filename.indexOf("20"));
+ dateString = dateString.substring(0, 10);
+ Date date = null;
+ try {
+ date = dateFormat.parse(dateString);
+ } catch (ParseException e) {
+ /* Ignore this URL. */
+ continue;
+ }
+ if (!torperfExperiments.containsKey(date)) {
+ torperfExperiments.put(date, new String[2]);
+ }
+ torperfExperiments.get(date)[0] = url;
+
+ /* URL contains exit list. */
+ } else if (filename.startsWith("exit-list-20")) {
+ String yearMonth = filename.substring(filename.indexOf("20"));
+ yearMonth = yearMonth.substring(0, 7);
+ Date month = null;
+ try {
+ month = monthFormat.parse(yearMonth);
+ } catch (ParseException e) {
+ /* Ignore this URL. */
+ continue;
+ }
+ if (!exitLists.containsKey(month)) {
+ exitLists.put(month, new String[2]);
+ }
+ exitLists.get(month)[0] = url;
+
+ /* URL contains bridge pool assignments. */
+ } else if (filename.startsWith("bridge-pool-assignments-20")) {
+ String yearMonth = filename.substring(filename.indexOf("20"));
+ yearMonth = yearMonth.substring(0, 7);
+ Date month = null;
+ try {
+ month = monthFormat.parse(yearMonth);
+ } catch (ParseException e) {
+ /* Ignore this URL. */
+ continue;
+ }
+ if (!bridgePoolAssignments.containsKey(month)) {
+ bridgePoolAssignments.put(month, new String[2]);
+ }
+ bridgePoolAssignments.get(month)[0] = url;
+ }
+ }
+
+ /* Add the maps to the request and forward it to the JSP to display
+ * the page. */
+ request.setAttribute("relayDescriptors", relayDescriptors);
+ request.setAttribute("certs", certs);
+ request.setAttribute("bridgeDescriptors", bridgeDescriptors);
+ request.setAttribute("relayStatistics", relayStatistics);
+ request.setAttribute("torperfData", torperfData);
+ request.setAttribute("exitLists", exitLists);
+ request.setAttribute("torperfTarballs", torperfTarballs);
+ request.setAttribute("torperfExperiments", torperfExperiments);
+ request.setAttribute("bridgePoolAssignments", bridgePoolAssignments);
+ request.getRequestDispatcher("WEB-INF/data.jsp").forward(request,
+ response);
+ }
+}
+
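
ResearchDataServlet above groups downloadable files into month-keyed
maps, where each entry is a two-element array whose first slot holds
the data file URL and whose second slot holds the optional .asc
signature URL. The following sketch shows the filename parsing it
applies to relay descriptor tarballs; the class name and the sample
filename are illustrative only.

import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.Date;

public class FilenameMonthSketch {

  public static void main(String[] args) throws ParseException {
    String filename = "consensuses-2014-01.tar.bz2";
    SimpleDateFormat monthFormat = new SimpleDateFormat("yyyy-MM");
    /* Everything before "-20" is the descriptor type. */
    String type = filename.substring(0, filename.indexOf("-20"));
    /* The seven characters starting at "20" are the year and month. */
    String yearMonth = filename.substring(filename.indexOf("20"));
    yearMonth = yearMonth.substring(0, 7);
    Date month = monthFormat.parse(yearMonth);
    /* Signature files ending in .asc would go into the second slot. */
    int index = filename.endsWith(".asc") ? 1 : 0;
    System.out.println(type + " / " + monthFormat.format(month)
        + " / slot " + index);
  }
}
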
diff --git a/website/src/org/torproject/metrics/web/research/ResearchFormatsServlet.java b/website/src/org/torproject/metrics/web/research/ResearchFormatsServlet.java
new file mode 100644
index 0000000..447ad44
--- /dev/null
+++ b/website/src/org/torproject/metrics/web/research/ResearchFormatsServlet.java
@@ -0,0 +1,24 @@
+/* Copyright 2011, 2012 The Tor Project
+ * See LICENSE for licensing information */
+package org.torproject.metrics.web.research;
+
+import java.io.IOException;
+
+import javax.servlet.ServletException;
+import javax.servlet.http.HttpServlet;
+import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpServletResponse;
+
+public class ResearchFormatsServlet extends HttpServlet {
+
+ private static final long serialVersionUID = 5666493868675314116L;
+
+ public void doGet(HttpServletRequest request,
+ HttpServletResponse response) throws IOException, ServletException {
+
+ /* Forward the request to the JSP that does all the hard work. */
+ request.getRequestDispatcher("WEB-INF/formats.jsp").forward(request,
+ response);
+ }
+}
+
diff --git a/website/src/org/torproject/metrics/web/research/ResearchPapersServlet.java b/website/src/org/torproject/metrics/web/research/ResearchPapersServlet.java
new file mode 100644
index 0000000..88dc6fe
--- /dev/null
+++ b/website/src/org/torproject/metrics/web/research/ResearchPapersServlet.java
@@ -0,0 +1,24 @@
+/* Copyright 2011, 2012 The Tor Project
+ * See LICENSE for licensing information */
+package org.torproject.metrics.web.research;
+
+import java.io.IOException;
+
+import javax.servlet.ServletException;
+import javax.servlet.http.HttpServlet;
+import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpServletResponse;
+
+public class ResearchPapersServlet extends HttpServlet {
+
+ private static final long serialVersionUID = -8135459207158536268L;
+
+ public void doGet(HttpServletRequest request,
+ HttpServletResponse response) throws IOException, ServletException {
+
+ /* Forward the request to the JSP that does all the hard work. */
+ request.getRequestDispatcher("WEB-INF/papers.jsp").forward(request,
+ response);
+ }
+}
+
diff --git a/website/src/org/torproject/metrics/web/research/ResearchStatsServlet.java b/website/src/org/torproject/metrics/web/research/ResearchStatsServlet.java
new file mode 100644
index 0000000..e9eaa38
--- /dev/null
+++ b/website/src/org/torproject/metrics/web/research/ResearchStatsServlet.java
@@ -0,0 +1,132 @@
+/* Copyright 2013 The Tor Project
+ * See LICENSE for licensing information */
+package org.torproject.metrics.web.research;
+
+import java.io.BufferedInputStream;
+import java.io.ByteArrayOutputStream;
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.IOException;
+import java.util.SortedSet;
+import java.util.TreeSet;
+
+import javax.servlet.ServletConfig;
+import javax.servlet.ServletException;
+import javax.servlet.http.HttpServlet;
+import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpServletResponse;
+
+public class ResearchStatsServlet extends HttpServlet {
+
+ private static final long serialVersionUID = 3346710354297653810L;
+
+ private File statsDir;
+
+ private SortedSet<String> availableStatisticsFiles;
+
+ public void init(ServletConfig config) throws ServletException {
+ super.init(config);
+ this.statsDir = new File(config.getInitParameter("statsDir"));
+ this.availableStatisticsFiles = new TreeSet<String>();
+ this.availableStatisticsFiles.add("servers");
+ this.availableStatisticsFiles.add("bandwidth");
+ this.availableStatisticsFiles.add("fast-exits");
+ this.availableStatisticsFiles.add("clients");
+ this.availableStatisticsFiles.add("torperf");
+ this.availableStatisticsFiles.add("connbidirect");
+ }
+
+ public long getLastModified(HttpServletRequest request) {
+ File statsFile = this.determineStatsFile(request);
+ if (statsFile == null || !statsFile.exists()) {
+ return -1L;
+ } else {
+ return statsFile.lastModified();
+ }
+ }
+
+ public void doGet(HttpServletRequest request,
+ HttpServletResponse response) throws IOException, ServletException {
+ String requestURI = request.getRequestURI();
+ if (requestURI.equals("/ernie/stats/")) {
+ this.writeDirectoryListing(request, response);
+ } else if (requestURI.equals("/ernie/stats.html")) {
+ this.writeStatisticsPage(request, response);
+ } else {
+ File statsFile = this.determineStatsFile(request);
+ if (statsFile == null) {
+ response.sendError(HttpServletResponse.SC_NOT_FOUND);
+ return;
+ } else if (!this.writeStatsFile(statsFile, response)) {
+ response.sendError(HttpServletResponse.SC_INTERNAL_SERVER_ERROR);
+ }
+ }
+ }
+
+ private void writeDirectoryListing(HttpServletRequest request,
+ HttpServletResponse response) throws IOException, ServletException {
+ request.setAttribute("directory", "/stats");
+ request.setAttribute("extension", ".csv");
+ request.setAttribute("files", this.availableStatisticsFiles);
+ request.getRequestDispatcher("/WEB-INF/dir.jsp").forward(request,
+ response);
+ }
+
+ private void writeStatisticsPage(HttpServletRequest request,
+ HttpServletResponse response) throws IOException, ServletException {
+ request.getRequestDispatcher("/WEB-INF/stats.jsp").forward(request,
+ response);
+ }
+
+ private File determineStatsFile(HttpServletRequest request) {
+ String requestedStatsFile = request.getRequestURI();
+ if (requestedStatsFile.equals("/ernie/stats/") ||
+ requestedStatsFile.equals("/ernie/stats.html")) {
+ return null;
+ }
+ if (requestedStatsFile.endsWith(".csv")) {
+ requestedStatsFile = requestedStatsFile.substring(0,
+ requestedStatsFile.length() - ".csv".length());
+ }
+ if (requestedStatsFile.contains("/")) {
+ requestedStatsFile = requestedStatsFile.substring(
+ requestedStatsFile.lastIndexOf("/") + 1);
+ }
+ if (!availableStatisticsFiles.contains(requestedStatsFile)) {
+ return null;
+ } else {
+ return new File(this.statsDir, requestedStatsFile + ".csv");
+ }
+ }
+
+ private boolean writeStatsFile(File statsFile,
+ HttpServletResponse response) throws IOException, ServletException {
+ if (!statsFile.exists()) {
+ return false;
+ }
+ byte[] statsFileBytes;
+ try {
+ BufferedInputStream bis = new BufferedInputStream(
+ new FileInputStream(statsFile), 1024);
+ ByteArrayOutputStream baos = new ByteArrayOutputStream();
+ byte[] buffer = new byte[1024];
+ int length;
+ while ((length = bis.read(buffer)) > 0) {
+ baos.write(buffer, 0, length);
+ }
+ bis.close();
+ statsFileBytes = baos.toByteArray();
+ } catch (IOException e) {
+ return false;
+ }
+ String statsFileContent = new String(statsFileBytes);
+ response.setContentType("text/csv");
+ response.setHeader("Content-Length", String.valueOf(
+ statsFileContent.length()));
+ response.setHeader("Content-Disposition",
+ "inline; filename=\"" + statsFile.getName() + "\"");
+ response.getWriter().print(statsFileContent);
+ return true;
+ }
+}
+
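
ResearchStatsServlet above maps a request URI onto one of six
whitelisted CSV files beneath the directory configured via the statsDir
init parameter. Here is a minimal sketch of that lookup; the class
name, example URI, and directory path are placeholders rather than
values taken from this commit.

import java.io.File;
import java.util.SortedSet;
import java.util.TreeSet;

public class StatsFileLookupSketch {

  public static void main(String[] args) {
    SortedSet<String> availableStatisticsFiles = new TreeSet<String>();
    availableStatisticsFiles.add("servers");
    availableStatisticsFiles.add("bandwidth");
    File statsDir = new File("/path/to/stats");     /* placeholder */
    String requested = "/ernie/stats/servers.csv";  /* example URI */
    /* Strip the .csv extension and any leading path components. */
    if (requested.endsWith(".csv")) {
      requested = requested.substring(0,
          requested.length() - ".csv".length());
    }
    if (requested.contains("/")) {
      requested = requested.substring(requested.lastIndexOf("/") + 1);
    }
    /* Only names from the whitelist are served; anything else results
     * in a 404 in the servlet. */
    File statsFile = availableStatisticsFiles.contains(requested)
        ? new File(statsDir, requested + ".csv") : null;
    System.out.println(statsFile);
  }
}
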
diff --git a/website/src/org/torproject/metrics/web/research/ResearchToolsServlet.java b/website/src/org/torproject/metrics/web/research/ResearchToolsServlet.java
new file mode 100644
index 0000000..a7de0f4
--- /dev/null
+++ b/website/src/org/torproject/metrics/web/research/ResearchToolsServlet.java
@@ -0,0 +1,24 @@
+/* Copyright 2011, 2012 The Tor Project
+ * See LICENSE for licensing information */
+package org.torproject.metrics.web.research;
+
+import java.io.IOException;
+
+import javax.servlet.ServletException;
+import javax.servlet.http.HttpServlet;
+import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpServletResponse;
+
+public class ResearchToolsServlet extends HttpServlet {
+
+ private static final long serialVersionUID = -3344204426180358872L;
+
+ public void doGet(HttpServletRequest request,
+ HttpServletResponse response) throws IOException, ServletException {
+
+ /* Forward the request to the JSP that does all the hard work. */
+ request.getRequestDispatcher("WEB-INF/tools.jsp").forward(request,
+ response);
+ }
+}
+
diff --git a/website/src/org/torproject/metrics/web/status/ConsensusHealthServlet.java b/website/src/org/torproject/metrics/web/status/ConsensusHealthServlet.java
new file mode 100644
index 0000000..d693349
--- /dev/null
+++ b/website/src/org/torproject/metrics/web/status/ConsensusHealthServlet.java
@@ -0,0 +1,57 @@
+/* Copyright 2011, 2012 The Tor Project
+ * See LICENSE for licensing information */
+package org.torproject.metrics.web.status;
+
+import java.io.BufferedInputStream;
+import java.io.BufferedOutputStream;
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.IOException;
+
+import javax.servlet.ServletException;
+import javax.servlet.http.HttpServlet;
+import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpServletResponse;
+
+public class ConsensusHealthServlet extends HttpServlet {
+
+ private static final long serialVersionUID = -5230032733057814869L;
+
+ public void doGet(HttpServletRequest request,
+ HttpServletResponse response) throws IOException,
+ ServletException {
+
+ /* Read file from disk and write it to response. */
+ BufferedInputStream input = null;
+ BufferedOutputStream output = null;
+ try {
+ File f = new File("/srv/metrics.torproject.org/ernie/website/"
+ + "consensus-health.html");
+ if (!f.exists()) {
+ response.sendError(HttpServletResponse.SC_NOT_FOUND);
+ return;
+ }
+ response.setContentType(this.getServletContext().getMimeType(f.getName()));
+ response.setHeader("Content-Length", String.valueOf(
+ f.length()));
+ response.setHeader("Content-Disposition",
+ "inline; filename=\"" + f.getName() + "\"");
+ input = new BufferedInputStream(new FileInputStream(f),
+ 1024);
+ output = new BufferedOutputStream(response.getOutputStream(), 1024);
+ byte[] buffer = new byte[1024];
+ int length;
+ while ((length = input.read(buffer)) > 0) {
+ output.write(buffer, 0, length);
+ }
+ } finally {
+ if (output != null) {
+ output.close();
+ }
+ if (input != null) {
+ input.close();
+ }
+ }
+ }
+}
+
diff --git a/website/src/org/torproject/metrics/web/status/ExoneraTorServlet.java b/website/src/org/torproject/metrics/web/status/ExoneraTorServlet.java
new file mode 100644
index 0000000..30a2ff8
--- /dev/null
+++ b/website/src/org/torproject/metrics/web/status/ExoneraTorServlet.java
@@ -0,0 +1,24 @@
+/* Copyright 2011, 2012 The Tor Project
+ * See LICENSE for licensing information */
+package org.torproject.metrics.web.status;
+
+import java.io.IOException;
+
+import javax.servlet.ServletException;
+import javax.servlet.http.HttpServlet;
+import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpServletResponse;
+
+public class ExoneraTorServlet extends HttpServlet {
+
+ private static final long serialVersionUID = -6227541092325776626L;
+
+ public void doGet(HttpServletRequest request,
+ HttpServletResponse response) throws IOException, ServletException {
+
+ /* Forward the request to the JSP that does all the hard work. */
+ request.getRequestDispatcher("WEB-INF/exonerator.jsp").forward(
+ request, response);
+ }
+}
+
diff --git a/website/src/org/torproject/metrics/web/status/RelaySearchServlet.java b/website/src/org/torproject/metrics/web/status/RelaySearchServlet.java
new file mode 100644
index 0000000..49b091e
--- /dev/null
+++ b/website/src/org/torproject/metrics/web/status/RelaySearchServlet.java
@@ -0,0 +1,535 @@
+/* Copyright 2011, 2012 The Tor Project
+ * See LICENSE for licensing information */
+package org.torproject.metrics.web.status;
+
+import java.io.IOException;
+import java.io.UnsupportedEncodingException;
+import java.math.BigInteger;
+import java.sql.Connection;
+import java.sql.ResultSet;
+import java.sql.SQLException;
+import java.sql.Statement;
+import java.text.ParseException;
+import java.text.SimpleDateFormat;
+import java.util.ArrayList;
+import java.util.Calendar;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.SortedMap;
+import java.util.SortedSet;
+import java.util.TimeZone;
+import java.util.TreeMap;
+import java.util.TreeSet;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+import java.util.regex.Pattern;
+
+import javax.naming.Context;
+import javax.naming.InitialContext;
+import javax.naming.NamingException;
+import javax.servlet.ServletException;
+import javax.servlet.http.HttpServlet;
+import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpServletResponse;
+import javax.sql.DataSource;
+
+import org.apache.commons.codec.binary.Base64;
+
+/**
+ * Web page that allows users to search for relays in the descriptor
+ * archives.
+ *
+ * Possible search terms for testing:
+ * - gabelmoo
+ * - gabelmoo 2010-09
+ * - gabelmoo 2010-09-18
+ * - gabelmoo $F2044413DAC2E02E3D6BCF4735A19BCA1DE97281
+ * - gabelmoo 80.190.246
+ * - gabelmoo $F2044413DAC2E02E3D6BCF4735A19BCA1DE97281 80.190.246
+ * - 5898549205 dc737cc9dca16af6 79.212.74.45
+ * - 5898549205 dc737cc9dca16af6
+ * - 80.190.246.100
+ * - $F2044413DAC2E02E3D6BCF4735A19BCA1DE97281
+ * - $F2044413DAC2E02E3D6BCF4735A19BCA1DE97281 80.190.246
+ * - 58985492
+ * - 58985492 79.212.74.45
+ */
+public class RelaySearchServlet extends HttpServlet {
+
+ private static final long serialVersionUID = -1772662230310611805L;
+
+ private Pattern alphaNumDotDashDollarSpacePattern =
+ Pattern.compile("[A-Za-z0-9\\.\\-$ ]+");
+
+ private Pattern numPattern = Pattern.compile("[0-9]+");
+
+ private Pattern hexPattern = Pattern.compile("[A-Fa-f0-9]+");
+
+ private Pattern alphaNumPattern = Pattern.compile("[A-Za-z0-9]+");
+
+ private SimpleDateFormat dayFormat = new SimpleDateFormat("yyyy-MM-dd");
+
+ private SimpleDateFormat monthFormat = new SimpleDateFormat("yyyy-MM");
+
+ private SimpleDateFormat dateTimeFormat =
+ new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
+
+ private long minValidAfterMillis;
+
+ private DataSource ds;
+
+ private Logger logger;
+
+ public void init() {
+
+ /* Initialize logger. */
+ this.logger = Logger.getLogger(RelaySearchServlet.class.toString());
+
+ /* Initialize date format parsers. */
+ dayFormat.setTimeZone(TimeZone.getTimeZone("UTC"));
+ monthFormat.setTimeZone(TimeZone.getTimeZone("UTC"));
+ dateTimeFormat.setTimeZone(TimeZone.getTimeZone("UTC"));
+
+ /* Look up data source. */
+ try {
+ Context cxt = new InitialContext();
+ this.ds = (DataSource) cxt.lookup("java:comp/env/jdbc/tordir");
+ this.logger.info("Successfully looked up data source.");
+ } catch (NamingException e) {
+ this.logger.log(Level.WARNING, "Could not look up data source", e);
+ }
+
+ /* Look up first consensus in the database. */
+ try {
+ long requestedConnection = System.currentTimeMillis();
+ Connection conn = this.ds.getConnection();
+ String query = "SELECT MIN(validafter) AS first FROM consensus";
+ Statement statement = conn.createStatement();
+ ResultSet rs = statement.executeQuery(query);
+ if (rs.next()) {
+ this.minValidAfterMillis = rs.getTimestamp(1).getTime();
+ }
+ rs.close();
+ statement.close();
+ conn.close();
+ this.logger.info("Returned a database connection to the pool "
+ + "after " + (System.currentTimeMillis()
+ - requestedConnection) + " millis.");
+ } catch (SQLException e) {
+ this.logger.log(Level.WARNING, "Could not look up first consensus "
+ + "valid-after time in the database.", e);
+ }
+ }
+
+ public void doGet(HttpServletRequest request,
+ HttpServletResponse response) throws IOException,
+ ServletException {
+
+ /* Read search parameter. If we don't have a search parameter, we're
+ * done here. */
+ String searchParameter = request.getParameter("search");
+ if (searchParameter == null || searchParameter.length() == 0) {
+ request.getRequestDispatcher("WEB-INF/relay-search.jsp").forward(
+ request, response);
+ return;
+ }
+
+ /* Parse search parameter to identify what nickname, fingerprint,
+ * and/or IP address to search for. A valid query contains no more
+ * than one identifier for each of the fields. As a special case,
+ * there are search terms consisting of 8 to 19 hex characters that
+ * can be either a nickname or a fingerprint. */
+ String searchNickname = "";
+ String searchFingerprint = "";
+ String searchIPAddress = "";
+ SortedSet<String> searchDays = new TreeSet<String>();
+ SortedSet<String> searchMonths = new TreeSet<String>();
+ SortedSet<Long> searchDayTimestamps = new TreeSet<Long>();
+ SortedSet<Long> searchMonthTimestamps = new TreeSet<Long>();
+ boolean validQuery = false;
+
+ /* Only parse the search parameter if it contains nothing other than
+ * alphanumeric characters, dots, dashes, dollar signs, and spaces. */
+ if (alphaNumDotDashDollarSpacePattern.matcher(searchParameter).
+ matches()) {
+ SortedSet<String> searchTerms = new TreeSet<String>();
+ if (searchParameter.trim().contains(" ")) {
+ String[] split = searchParameter.trim().split(" ");
+ for (int i = 0; i < split.length; i++) {
+ if (split[i].length() > 0) {
+ searchTerms.add(split[i]);
+ }
+ }
+ } else {
+ searchTerms.add(searchParameter.trim());
+ }
+
+ /* Parse each search term separately. */
+ for (String searchTerm : searchTerms) {
+
+ /* If the search term contains a dot, it can only be an IP
+ * address. */
+ if (searchTerm.contains(".") && !searchTerm.startsWith(".")) {
+ String[] octets = searchTerm.split("\\.");
+ if (searchIPAddress.length() > 0 || octets.length < 2 ||
+ octets.length > 4) {
+ validQuery = false;
+ break;
+ }
+ boolean invalidOctet = false;
+ StringBuilder sb = new StringBuilder();
+ for (int i = 0; i < octets.length; i++) {
+ if (!numPattern.matcher(octets[i]).matches() ||
+ octets[i].length() > 3 ||
+ Integer.parseInt(octets[i]) > 255) {
+ invalidOctet = true;
+ break;
+ } else {
+ sb.append("." + Integer.parseInt(octets[i]));
+ }
+ }
+ if (invalidOctet) {
+ validQuery = false;
+ break;
+ }
+ if (octets.length < 4) {
+ sb.append(".");
+ }
+ searchIPAddress = sb.toString().substring(1);
+ validQuery = true;
+ }
+
+ /* If the search term contains hyphens, it must be a month or a
+ * day. */
+ else if (searchTerm.contains("-") &&
+ searchTerm.startsWith("20")) {
+ try {
+ if (searchTerm.length() == 10) {
+ searchDayTimestamps.add(dayFormat.parse(searchTerm).
+ getTime());
+ searchDays.add(searchTerm);
+ } else if (searchTerm.length() == 7) {
+ searchMonthTimestamps.add(monthFormat.parse(searchTerm).
+ getTime());
+ searchMonths.add(searchTerm);
+ } else {
+ validQuery = false;
+ break;
+ }
+ } catch (ParseException e) {
+ validQuery = false;
+ break;
+ }
+ }
+
+ /* If the search term starts with a $ followed by 8 to 40 hex
+ * characters, or consists of 20 to 40 hex characters without a
+ * leading $, it must be a fingerprint. */
+ else if ((searchTerm.length() >= 9 && searchTerm.length() <= 41 &&
+ searchTerm.startsWith("$") &&
+ hexPattern.matcher(searchTerm.substring(1)).matches()) ||
+ (searchTerm.length() > 19 && searchTerm.length() <= 40 &&
+ !searchTerm.startsWith("$") &&
+ hexPattern.matcher(searchTerm).matches())) {
+ if (searchFingerprint.length() > 0) {
+ validQuery = false;
+ break;
+ }
+ searchFingerprint = searchTerm.substring(
+ (searchTerm.startsWith("$") ? 1 : 0));
+ validQuery = true;
+ }
+
+ /* If the search term contains up to 19 alphanumerical characters,
+ * it must be a nickname. */
+ else if (searchTerm.length() <= 19 &&
+ alphaNumPattern.matcher(searchTerm).matches()) {
+ if (searchNickname.length() > 0) {
+ validQuery = false;
+ break;
+ }
+ searchNickname = searchTerm;
+ validQuery = true;
+ }
+
+ /* We didn't recognize this search term. */
+ else {
+ validQuery = false;
+ break;
+ }
+ }
+ }
+
+ /* We accept at most one month or at most three days, but not both,
+ * so that people cannot accidentally keep the database busy. */
+ if (searchDays.size() > 3 || searchMonths.size() > 1 ||
+ (searchMonths.size() == 1 && searchDays.size() > 0)) {
+ validQuery = false;
+ }
+
+ /* If the query is invalid, stop here. */
+ if (!validQuery) {
+ request.setAttribute("invalidQuery", "Query is invalid.");
+ request.getRequestDispatcher("WEB-INF/relay-search.jsp").
+ forward(request, response);
+ return;
+ }
+
+ /* Look up last consensus in the database. */
+ long maxValidAfterMillis = -1L;
+ try {
+ long requestedConnection = System.currentTimeMillis();
+ Connection conn = this.ds.getConnection();
+ String query = "SELECT MAX(validafter) AS last FROM consensus";
+ Statement statement = conn.createStatement();
+ ResultSet rs = statement.executeQuery(query);
+ if (rs.next()) {
+ maxValidAfterMillis = rs.getTimestamp(1).getTime();
+ }
+ rs.close();
+ statement.close();
+ conn.close();
+ this.logger.info("Returned a database connection to the pool "
+ + "after " + (System.currentTimeMillis()
+ - requestedConnection) + " millis.");
+ } catch (SQLException e) {
+ this.logger.log(Level.WARNING, "Could not look up last consensus "
+ + "valid-after time in the database.", e);
+ }
+
+ /* Prepare a string that says what we're searching for. */
+ List<String> recognizedSearchTerms = new ArrayList<String>();
+ if (searchNickname.length() > 0) {
+ recognizedSearchTerms.add("nickname <b>" + searchNickname + "</b>");
+ }
+ if (searchFingerprint.length() > 0) {
+ recognizedSearchTerms.add("fingerprint <b>" + searchFingerprint
+ + "</b>");
+ }
+ if (searchIPAddress.length() > 0) {
+ recognizedSearchTerms.add("IP address <b>" + searchIPAddress
+ + "</b>");
+ }
+ List<String> recognizedIntervals = new ArrayList<String>();
+ for (String searchTerm : searchMonths) {
+ recognizedIntervals.add("in <b>" + searchTerm + "</b>");
+ }
+ for (String searchTerm : searchDays) {
+ recognizedIntervals.add("on <b>" + searchTerm + "</b>");
+ }
+ StringBuilder searchNoticeBuilder = new StringBuilder();
+ if (maxValidAfterMillis > 0L) {
+ searchNoticeBuilder.append("Most recent consensus in database is "
+ + "from " + dateTimeFormat.format(maxValidAfterMillis)
+ + ".</p><p>");
+ }
+ searchNoticeBuilder.append("Searching for relays with ");
+ if (recognizedSearchTerms.size() == 1) {
+ searchNoticeBuilder.append(recognizedSearchTerms.get(0));
+ } else if (recognizedSearchTerms.size() == 2) {
+ searchNoticeBuilder.append(recognizedSearchTerms.get(0) + " and "
+ + recognizedSearchTerms.get(1));
+ } else {
+ for (int i = 0; i < recognizedSearchTerms.size() - 1; i++) {
+ searchNoticeBuilder.append(recognizedSearchTerms.get(i) + ", ");
+ }
+ searchNoticeBuilder.append("and " + recognizedSearchTerms.get(
+ recognizedSearchTerms.size() - 1));
+ }
+ if (recognizedIntervals.size() == 1) {
+ searchNoticeBuilder.append(" running "
+ + recognizedIntervals.get(0));
+ } else if (recognizedIntervals.size() == 2) {
+ searchNoticeBuilder.append(" running " + recognizedIntervals.get(0)
+ + " and/or " + recognizedIntervals.get(1));
+ } else if (recognizedIntervals.size() > 2) {
+ searchNoticeBuilder.append(" running ");
+ for (int i = 0; i < recognizedIntervals.size() - 1; i++) {
+ searchNoticeBuilder.append(recognizedIntervals.get(i) + ", ");
+ }
+ searchNoticeBuilder.append("and/or " + recognizedIntervals.get(
+ recognizedIntervals.size() - 1));
+ }
+ searchNoticeBuilder.append(" ...");
+ String searchNotice = searchNoticeBuilder.toString();
+ request.setAttribute("searchNotice", searchNotice);
+
+ /* Prepare the query string. */
+ StringBuilder conditionBuilder = new StringBuilder();
+ boolean addAnd = false;
+ if (searchNickname.length() > 0) {
+ conditionBuilder.append((addAnd ? "AND " : "")
+ + "LOWER(nickname) LIKE '" + searchNickname.toLowerCase()
+ + "%' ");
+ addAnd = true;
+ }
+ if (searchFingerprint.length() > 0) {
+ conditionBuilder.append((addAnd ? "AND " : "")
+ + "fingerprint LIKE '" + searchFingerprint.toLowerCase()
+ + "%' ");
+ addAnd = true;
+ }
+ if (searchIPAddress.length() > 0) {
+ conditionBuilder.append((addAnd ? "AND " : "")
+ + "address LIKE '" + searchIPAddress + "%' ");
+ addAnd = true;
+ }
+ List<String> timeIntervals = new ArrayList<String>();
+ if (searchDayTimestamps.size() > 0 ||
+ searchMonthTimestamps.size() > 0) {
+ StringBuilder timeIntervalBuilder = new StringBuilder();
+ boolean addOr = false;
+ timeIntervalBuilder.append("AND (");
+ for (long searchTimestamp : searchDayTimestamps) {
+ if (searchTimestamp < this.minValidAfterMillis) {
+ request.setAttribute("outsideInterval", "Returned search "
+ + "results may be incomplete, as our data only dates back "
+ + "to " + dateTimeFormat.format(this.minValidAfterMillis)
+ + ". Older archives are not available.");
+ }
+ timeIntervalBuilder.append((addOr ? "OR " : "")
+ + "(validafter >= '"
+ + dateTimeFormat.format(searchTimestamp) + "' AND "
+ + "validafter < '" + dateTimeFormat.format(searchTimestamp
+ + 24L * 60L * 60L * 1000L) + "') ");
+ addOr = true;
+ }
+ for (long searchTimestamp : searchMonthTimestamps) {
+ if (searchTimestamp < this.minValidAfterMillis) {
+ request.setAttribute("outsideInterval", "Returned search "
+ + "results may be incomplete, as our data only dates back "
+ + "to " + dateTimeFormat.format(this.minValidAfterMillis)
+ + ". Older archives are not available.");
+ }
+ Calendar firstOfNextMonth = Calendar.getInstance(
+ TimeZone.getTimeZone("UTC"));
+ firstOfNextMonth.setTimeInMillis(searchTimestamp);
+ firstOfNextMonth.add(Calendar.MONTH, 1);
+ timeIntervalBuilder.append((addOr ? "OR " : "")
+ + "(validafter >= '"
+ + dateTimeFormat.format(searchTimestamp) + "' AND "
+ + "validafter < '" + dateTimeFormat.format(
+ firstOfNextMonth.getTimeInMillis()) + "') ");
+ addOr = true;
+ }
+ timeIntervalBuilder.append(") ");
+ timeIntervals.add(timeIntervalBuilder.toString());
+ } else {
+ timeIntervals.add("AND validafter >= '"
+ + dateTimeFormat.format(System.currentTimeMillis()
+ - 4L * 24L * 60L * 60L * 1000L) + "' ");
+ timeIntervals.add("AND validafter >= '"
+ + dateTimeFormat.format(System.currentTimeMillis()
+ - 30L * 24L * 60L * 60L * 1000L) + "' ");
+ }
+ List<String> queries = new ArrayList<String>();
+ for (String timeInterval : timeIntervals) {
+ StringBuilder queryBuilder = new StringBuilder();
+ queryBuilder.append("SELECT validafter, fingerprint, descriptor, "
+ + "rawdesc FROM statusentry WHERE validafter IN (SELECT "
+ + "validafter FROM statusentry WHERE ");
+ queryBuilder.append(conditionBuilder.toString());
+ queryBuilder.append(timeInterval);
+ queryBuilder.append("ORDER BY validafter DESC LIMIT 31) AND ");
+ queryBuilder.append(conditionBuilder.toString());
+ queryBuilder.append(timeInterval);
+ queries.add(queryBuilder.toString());
+ }
+
+ /* Actually execute the query. */
+ long startedQuery = System.currentTimeMillis();
+ SortedMap<String, SortedSet<String>> foundDescriptors =
+ new TreeMap<String, SortedSet<String>>(
+ Collections.reverseOrder());
+ Map<String, String> rawValidAfterLines =
+ new HashMap<String, String>();
+ Map<String, String> rawStatusEntries = new HashMap<String, String>();
+ String query = null;
+ int matches = 0;
+ try {
+ long requestedConnection = System.currentTimeMillis();
+ Connection conn = this.ds.getConnection();
+ while (!queries.isEmpty()) {
+ query = queries.remove(0);
+ this.logger.info("Running query '" + query + "'.");
+ Statement statement = conn.createStatement();
+ ResultSet rs = statement.executeQuery(query);
+ while (rs.next()) {
+ matches++;
+ String validAfter = rs.getTimestamp(1).toString().
+ substring(0, 19);
+ String fingerprint = rs.getString(2);
+ if (!foundDescriptors.containsKey(validAfter)) {
+ foundDescriptors.put(validAfter, new TreeSet<String>());
+ }
+ foundDescriptors.get(validAfter).add(validAfter + " "
+ + fingerprint);
+ if (!rawValidAfterLines.containsKey(validAfter)) {
+ rawValidAfterLines.put(validAfter, "<tt>valid-after "
+ + "<a href=\"https://exonerator.torproject.org/"
+ + "consensus?valid-after="
+ + validAfter.replaceAll(":", "-").replaceAll(" ", "-")
+ + "\" target=\"_blank\">" + validAfter + "</a></tt><br>");
+ }
+ byte[] rawStatusEntry = rs.getBytes(4);
+ String statusEntryLines = null;
+ try {
+ statusEntryLines = new String(rawStatusEntry, "US-ASCII");
+ } catch (UnsupportedEncodingException e) {
+ /* This shouldn't happen, because we know that ASCII is
+ * supported. */
+ }
+ StringBuilder rawStatusEntryBuilder = new StringBuilder();
+ String[] lines = statusEntryLines.split("\n");
+ for (String line : lines) {
+ if (line.startsWith("r ")) {
+ String[] parts = line.split(" ");
+ String descriptorBase64 = String.format("%040x",
+ new BigInteger(1, Base64.decodeBase64(parts[3]
+ + "==")));
+ rawStatusEntryBuilder.append("<tt>r " + parts[1] + " "
+ + parts[2] + " <a href=\""
+ + "https://exonerator.torproject.org/"
+ + "serverdesc?desc-id="
+ + descriptorBase64 + "\" target=\"_blank\">" + parts[3]
+ + "</a> " + parts[4] + " " + parts[5] + " " + parts[6]
+ + " " + parts[7] + " " + parts[8] + "</tt><br>");
+ } else {
+ rawStatusEntryBuilder.append("<tt>" + line + "</tt><br>");
+ }
+ }
+ rawStatusEntries.put(validAfter + " " + fingerprint,
+ rawStatusEntryBuilder.toString());
+ }
+ rs.close();
+ statement.close();
+ if (matches >= 31) {
+ queries.clear();
+ }
+ }
+ conn.close();
+ this.logger.info("Returned a database connection to the pool "
+ + "after " + (System.currentTimeMillis()
+ - requestedConnection) + " millis.");
+ } catch (SQLException e) {
+
+ /* Tell the user we have a database problem. */
+ response.sendError(HttpServletResponse.SC_INTERNAL_SERVER_ERROR,
+ "Database problem");
+ return;
+ }
+ request.setAttribute("query", query);
+ request.setAttribute("queryTime", System.currentTimeMillis()
+ - startedQuery);
+ request.setAttribute("foundDescriptors", foundDescriptors);
+ request.setAttribute("rawValidAfterLines", rawValidAfterLines);
+ request.setAttribute("rawStatusEntries", rawStatusEntries);
+ request.setAttribute("matches", matches);
+
+ /* We're done. Let the JSP do the rest. */
+ request.getRequestDispatcher("WEB-INF/relay-search.jsp").forward(
+ request, response);
+ }
+}
+
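
RelaySearchServlet above splits the search string on spaces and
classifies every term as an IP address prefix, a day or month, a
fingerprint, or a nickname before building the database query. The
sketch below condenses that per-term classification; the class name is
hypothetical, and the sample terms are taken from the testing list in
the servlet's Javadoc.

import java.util.regex.Pattern;

public class SearchTermSketch {

  private static final Pattern HEX = Pattern.compile("[A-Fa-f0-9]+");
  private static final Pattern ALNUM = Pattern.compile("[A-Za-z0-9]+");

  static String classify(String term) {
    if (term.contains(".") && !term.startsWith(".")) {
      return "ip address";      /* octets are validated in the servlet */
    } else if (term.startsWith("20") && term.contains("-")) {
      if (term.length() == 10) {
        return "day";
      } else if (term.length() == 7) {
        return "month";
      }
      return "invalid";
    } else if (term.startsWith("$") && term.length() >= 9
        && term.length() <= 41
        && HEX.matcher(term.substring(1)).matches()) {
      return "fingerprint";     /* $ plus 8 to 40 hex characters */
    } else if (term.length() > 19 && term.length() <= 40
        && HEX.matcher(term).matches()) {
      return "fingerprint";     /* 20 to 40 hex characters without $ */
    } else if (term.length() <= 19 && ALNUM.matcher(term).matches()) {
      return "nickname";
    }
    return "invalid";
  }

  public static void main(String[] args) {
    String[] terms = { "gabelmoo", "2010-09-18",
        "$F2044413DAC2E02E3D6BCF4735A19BCA1DE97281", "80.190.246" };
    for (String term : terms) {
      System.out.println(term + " -> " + classify(term));
    }
  }
}

In the servlet, a term that fails all of these checks marks the whole
query as invalid, and at most three days or one month are accepted per
query to keep the database load bounded.
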