
[or-cvs] [metrics-db/master] Import remaining stats files into the database.



Author: Karsten Loesing <karsten.loesing@xxxxxxx>
Date: Mon, 4 Oct 2010 16:13:58 +0200
Subject: Import remaining stats files into the database.
Commit: 06f051e147f9aefb7880f6eb06fd2f1bf40d6339

This commit is based on work by Kevin Berry.
---
 config                                             |    5 +
 db/tordir.sql                                      |   53 ++++++++++
 .../ernie/db/BridgeStatsFileHandler.java           |   85 ++++++++++++++++-
 src/org/torproject/ernie/db/Configuration.java     |   10 ++-
 .../ernie/db/ConsensusStatsFileHandler.java        |   64 ++++++++++++-
 .../ernie/db/DirreqStatsFileHandler.java           |  101 +++++++++++++++++++-
 src/org/torproject/ernie/db/GetTorProcessor.java   |   77 ++++++++++++++-
 src/org/torproject/ernie/db/Main.java              |   20 +++-
 src/org/torproject/ernie/db/TorperfProcessor.java  |   79 +++++++++++++++-
 9 files changed, 476 insertions(+), 18 deletions(-)

diff --git a/config b/config
index 64ce8ff..8fd75b9 100644
--- a/config
+++ b/config
@@ -83,6 +83,11 @@
 ## Write relay descriptors to a database for later evaluation
 #WriteRelayDescriptorDatabase 0
 #
+## Write aggregate statistics (bridges and bridge users per day, directory
+## clients per day, torperf results, packages requested from GetTor, etc.)
+## to the database for later evaluation
+#WriteAggregateStatsDatabase 0
+#
 ## JDBC string for relay descriptor database
 #RelayDescriptorDatabaseJDBC jdbc:postgresql://localhost/tordir?user=ernie&password=password
 #
diff --git a/db/tordir.sql b/db/tordir.sql
index 37654de..85a6f22 100644
--- a/db/tordir.sql
+++ b/db/tordir.sql
@@ -500,3 +500,56 @@ CREATE OR REPLACE FUNCTION refresh_total_bwhist() RETURNS INTEGER AS $$
   END;
 $$ LANGUAGE plpgsql;
 
+-- non-relay statistics
+-- The following tables contain pre-aggregated statistics that are not
+-- based on relay descriptors or that are not yet derived from the relay
+-- descriptors in the database.
+
+-- TABLE bridge_network_size
+-- Contains average number of running bridges.
+CREATE TABLE bridge_network_size (
+    "date" DATE NOT NULL,
+    avg_running INTEGER NOT NULL,
+    CONSTRAINT bridge_network_size_pkey PRIMARY KEY(date)
+);
+
+-- TABLE dirreq_stats
+-- Contains daily users by country.
+CREATE TABLE dirreq_stats (
+    source CHARACTER(40) NOT NULL,
+    "date" DATE NOT NULL,
+    country CHARACTER(2) NOT NULL,
+    requests INTEGER NOT NULL,
+    "share" DOUBLE PRECISION NOT NULL,
+    CONSTRAINT dirreq_stats_pkey PRIMARY KEY (source, "date", country)
+);
+
+-- TABLE bridge_stats
+-- Contains daily bridge users by country.
+CREATE TABLE bridge_stats (
+    "date" DATE NOT NULL,
+    country CHARACTER(2) NOT NULL,
+    users INTEGER NOT NULL,
+    CONSTRAINT bridge_stats_pkey PRIMARY KEY ("date", country)
+);
+
+-- TABLE torperf_stats
+-- Contains quartiles and medians of daily torperf results.
+CREATE TABLE torperf_stats (
+    "date" DATE NOT NULL,
+    source CHARACTER VARYING(32) NOT NULL,
+    q1 INTEGER NOT NULL,
+    md INTEGER NOT NULL,
+    q3 INTEGER NOT NULL,
+    CONSTRAINT torperf_stats_pkey PRIMARY KEY("date", source)
+);
+
+-- TABLE gettor_stats
+-- Contains packages requested from GetTor.
+CREATE TABLE gettor_stats (
+    "date" DATE NOT NULL,
+    bundle CHARACTER VARYING(32) NOT NULL,
+    downloads INTEGER NOT NULL,
+    CONSTRAINT gettor_stats_pkey PRIMARY KEY("date", bundle)
+);
+
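As an illustration of how the new tables can be used once the importer has filled them, a query like the following (not part of this patch; only a sketch against the bridge_stats schema above) sums the estimated daily bridge users over all countries:

  -- Example query, not included in the commit: total estimated bridge
  -- users per day, aggregated over the per-country rows.
  SELECT "date", SUM(users) AS total_users
  FROM bridge_stats
  GROUP BY "date"
  ORDER BY "date";
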
diff --git a/src/org/torproject/ernie/db/BridgeStatsFileHandler.java b/src/org/torproject/ernie/db/BridgeStatsFileHandler.java
index f37da08..ca4dff8 100644
--- a/src/org/torproject/ernie/db/BridgeStatsFileHandler.java
+++ b/src/org/torproject/ernie/db/BridgeStatsFileHandler.java
@@ -3,6 +3,7 @@
 package org.torproject.ernie.db;
 
 import java.io.*;
+import java.sql.*;
 import java.text.*;
 import java.util.*;
 import java.util.logging.*;
@@ -74,12 +75,16 @@ public class BridgeStatsFileHandler {
    */
   private Logger logger;
 
+  /* Database connection string. */
+  private String connectionURL = null;
+
   /**
    * Initializes this class, including reading in intermediate results
    * files <code>stats/bridge-stats-raw</code> and
    * <code>stats/hashed-relay-identities</code>.
    */
-  public BridgeStatsFileHandler(SortedSet<String> countries) {
+  public BridgeStatsFileHandler(SortedSet<String> countries,
+     String connectionURL) {
 
     /* Memorize the set of countries we care about. */
     this.countries = countries;
@@ -97,6 +102,9 @@ public class BridgeStatsFileHandler {
     this.zeroTwoTwoDescriptorsFile = new File(
         "stats/v022-bridge-descriptors");
 
+    /* Initialize database connection string. */
+    this.connectionURL = connectionURL;
+
     /* Initialize logger. */
     this.logger = Logger.getLogger(
         BridgeStatsFileHandler.class.getName());
@@ -410,7 +418,7 @@ public class BridgeStatsFileHandler {
         while (currentDateMillis - 24L * 60L * 60L * 1000L
             > lastDateMillis) {
           lastDateMillis += 24L * 60L * 60L * 1000L;
-          bw.append(dateFormat.format(new Date(lastDateMillis)));
+          bw.append(dateFormat.format(lastDateMillis));
           for (int i = 0; i < this.countries.size(); i++) {
             bw.append(",NA");
           }
@@ -436,6 +444,79 @@ public class BridgeStatsFileHandler {
       this.logger.log(Level.WARNING, "Failed to write "
           + this.bridgeStatsFile.getAbsolutePath() + "!", e);
     }
+
+    /* Add daily bridge users to database. */
+    if (connectionURL != null) {
+      try {
+        List<String> countryList = new ArrayList<String>();
+        for (String c : this.countries) {
+          countryList.add(c);
+        }
+        Map<String, Double> insertRows = new HashMap<String, Double>(),
+            updateRows = new HashMap<String, Double>();
+        for (Map.Entry<String, double[]> e :
+            bridgeUsersPerDay.entrySet()) {
+          String date = e.getKey();
+          double[] users = e.getValue();
+          for (int i = 0; i < users.length; i++) {
+            String country = countryList.get(i);
+            String key = date + "," + country;
+            insertRows.put(key, users[i]);
+          }
+        }
+        Connection conn = DriverManager.getConnection(connectionURL);
+        conn.setAutoCommit(false);
+        Statement statement = conn.createStatement();
+        ResultSet rs = statement.executeQuery(
+            "SELECT date, country, users FROM bridge_stats");
+        while (rs.next()) {
+          String date = rs.getDate(1).toString();
+          String country = rs.getString(2);
+          String key = date + "," + country;
+          if (insertRows.containsKey(key)) {
+            double insertRow = insertRows.remove(key);
+            double oldUsers = rs.getDouble(3);
+            if (oldUsers != insertRow) {
+              updateRows.put(key, insertRow);
+            }
+          }
+        }
+        rs.close();
+        PreparedStatement psU = conn.prepareStatement(
+            "UPDATE bridge_stats SET users = ? "
+            + "WHERE date = ? AND country = ?");
+        for (Map.Entry<String, Double> e : updateRows.entrySet()) {
+          String[] keyParts = e.getKey().split(",");
+          java.sql.Date date = java.sql.Date.valueOf(keyParts[0]);
+          String country = keyParts[1];
+          double users = e.getValue();
+          psU.clearParameters();
+          psU.setDouble(1, users);
+          psU.setDate(2, date);
+          psU.setString(3, country);
+          psU.executeUpdate();
+        }
+        PreparedStatement psI = conn.prepareStatement(
+            "INSERT INTO bridge_stats (users, date, country) "
+            + "VALUES (?, ?, ?)");
+        for (Map.Entry<String, Double> e : insertRows.entrySet()) {
+          String[] keyParts = e.getKey().split(",");
+          java.sql.Date date = java.sql.Date.valueOf(keyParts[0]);
+          String country = keyParts[1];
+          double users = e.getValue();
+          psI.clearParameters();
+          psI.setDouble(1, users);
+          psI.setDate(2, date);
+          psI.setString(3, country);
+          psI.executeUpdate();
+        }
+        conn.commit();
+        conn.close();
+      } catch (SQLException e) {
+        logger.log(Level.WARNING, "Failed to add daily bridge users to "
+            + "database.", e);
+      }
+    }
   }
 }
 
diff --git a/src/org/torproject/ernie/db/Configuration.java b/src/org/torproject/ernie/db/Configuration.java
index 5fb5e0e..428dbb5 100644
--- a/src/org/torproject/ernie/db/Configuration.java
+++ b/src/org/torproject/ernie/db/Configuration.java
@@ -37,6 +37,7 @@ public class Configuration {
   private String directoryArchivesDirectory = "archives/";
   private boolean keepDirectoryArchiveImportHistory = false;
   private boolean writeRelayDescriptorDatabase = false;
+  private boolean writeAggregateStatsDatabase = false;
   private String relayDescriptorDatabaseJdbc =
       "jdbc:postgresql://localhost/tordir?user=ernie&password=password";
   private boolean writeRelayDescriptorsRawFiles = false;
@@ -141,6 +142,9 @@ public class Configuration {
         } else if (line.startsWith("WriteRelayDescriptorDatabase")) {
           this.writeRelayDescriptorDatabase = Integer.parseInt(
               line.split(" ")[1]) != 0;
+        } else if (line.startsWith("WriteAggregateStatsDatabase")) {
+          this.writeAggregateStatsDatabase = Integer.parseInt(
+              line.split(" ")[1]) != 0;
         } else if (line.startsWith("RelayDescriptorDatabaseJDBC")) {
           this.relayDescriptorDatabaseJdbc = line.split(" ")[1];
         } else if (line.startsWith("WriteRelayDescriptorsRawFiles")) {
@@ -244,6 +248,7 @@ public class Configuration {
         !this.downloadProcessGetTorStats && !this.downloadExitList &&
         !this.writeDirectoryArchives &&
         !this.writeRelayDescriptorDatabase &&
+        !this.writeAggregateStatsDatabase &&
         !this.writeSanitizedBridges && !this.writeConsensusStats &&
         !this.writeDirreqStats && !this.writeBridgeStats &&
         !this.writeServerDescriptorStats && !this.writeConsensusHealth) {
@@ -283,7 +288,7 @@ public class Configuration {
     }
     if ((this.importSanitizedBridges || this.importBridgeSnapshots) &&
         !(this.writeSanitizedBridges || this.writeConsensusStats ||
-        this.writeBridgeStats)) {
+        this.writeBridgeStats || this.writeAggregateStatsDatabase)) {
       logger.warning("We are configured to import/download bridge "
           + "descriptors, but we don't have a single data sink to write "
           + "bridge descriptors to.");
@@ -343,6 +348,9 @@ public class Configuration {
   public boolean getWriteRelayDescriptorDatabase() {
     return this.writeRelayDescriptorDatabase;
   }
+  public boolean getWriteAggregateStatsDatabase() {
+    return this.writeAggregateStatsDatabase;
+  }
   public String getRelayDescriptorDatabaseJDBC() {
     return this.relayDescriptorDatabaseJdbc;
   }
diff --git a/src/org/torproject/ernie/db/ConsensusStatsFileHandler.java b/src/org/torproject/ernie/db/ConsensusStatsFileHandler.java
index 9075323..469e239 100644
--- a/src/org/torproject/ernie/db/ConsensusStatsFileHandler.java
+++ b/src/org/torproject/ernie/db/ConsensusStatsFileHandler.java
@@ -3,6 +3,7 @@
 package org.torproject.ernie.db;
 
 import java.io.*;
+import java.sql.*;
 import java.text.*;
 import java.util.*;
 import java.util.logging.*;
@@ -88,13 +89,16 @@ public class ConsensusStatsFileHandler {
 
   private int relayResultsAdded = 0, bridgeResultsAdded = 0;
 
+  /* Database connection string. */
+  private String connectionURL = null;
+
  /**
   * Initializes this class, including reading in intermediate results
   * files <code>stats/consensus-stats-raw</code> and
   * <code>stats/bridge-consensus-stats-raw</code> and final results file
   * <code>stats/consensus-stats</code>.
   */
-  public ConsensusStatsFileHandler() {
+  public ConsensusStatsFileHandler(String connectionURL) {
 
     /* Initialize local data structures to hold intermediate and final
      * results. */
@@ -109,6 +113,9 @@ public class ConsensusStatsFileHandler {
         "stats/bridge-consensus-stats-raw");
     this.consensusStatsFile = new File("stats/consensus-stats");
 
+    /* Initialize database connection string. */
+    this.connectionURL = connectionURL;
+
     /* Initialize logger. */
     this.logger = Logger.getLogger(
         ConsensusStatsFileHandler.class.getName());
@@ -448,7 +455,7 @@ public class ConsensusStatsFileHandler {
         while (currentDateMillis <= lastDateMillis) {
           /* Write observations about relays, bridges, both, or none of
            * them. */
-          String date = dateFormat.format(new Date(currentDateMillis));
+          String date = dateFormat.format(currentDateMillis);
           if (this.relaysPerDay.containsKey(date)) {
             bw.append(this.relaysPerDay.get(date));
           } else {
@@ -478,6 +485,59 @@ public class ConsensusStatsFileHandler {
           + ", because nothing has changed.");
     }
 
+    /* Add average number of bridges per day to the database. */
+    if (connectionURL != null) {
+      try {
+        Map<String, String> insertRows = new HashMap<String, String>(),
+            updateRows = new HashMap<String, String>();
+        insertRows.putAll(this.bridgesPerDay);
+        Connection conn = DriverManager.getConnection(connectionURL);
+        conn.setAutoCommit(false);
+        Statement statement = conn.createStatement();
+        ResultSet rs = statement.executeQuery(
+            "SELECT date, avg_running FROM bridge_network_size");
+        while (rs.next()) {
+          String date = rs.getDate(1).toString();
+          if (insertRows.containsKey(date)) {
+            String insertRow = insertRows.remove(date);
+            long newAvgRunning = Long.parseLong(insertRow.substring(1));
+            long oldAvgRunning = rs.getLong(2);
+            if (newAvgRunning != oldAvgRunning) {
+              updateRows.put(date, insertRow);
+            }
+          }
+        }
+        rs.close();
+        PreparedStatement psU = conn.prepareStatement(
+            "UPDATE bridge_network_size SET avg_running = ? "
+            + "WHERE date = ?");
+        for (Map.Entry<String, String> e : updateRows.entrySet()) {
+          java.sql.Date date = java.sql.Date.valueOf(e.getKey());
+          long avgRunning = Long.parseLong(e.getValue().substring(1));
+          psU.clearParameters();
+          psU.setLong(1, avgRunning);
+          psU.setDate(2, date);
+          psU.executeUpdate();
+        }
+        PreparedStatement psI = conn.prepareStatement(
+            "INSERT INTO bridge_network_size (avg_running, date) "
+            + "VALUES (?, ?)");
+        for (Map.Entry<String, String> e : insertRows.entrySet()) {
+          java.sql.Date date = java.sql.Date.valueOf(e.getKey());
+          long avgRunning = Long.parseLong(e.getValue().substring(1));
+          psI.clearParameters();
+          psI.setLong(1, avgRunning);
+          psI.setDate(2, date);
+          psI.executeUpdate();
+        }
+        conn.commit();
+        conn.close();
+      } catch (SQLException e) {
+        logger.log(Level.WARNING, "Failed to add average bridge numbers "
+            + "to database.", e);
+      }
+    }
+
     /* Set modification flags to false again. */
     this.relaysRawModified = this.bridgesRawModified = false;
 
diff --git a/src/org/torproject/ernie/db/DirreqStatsFileHandler.java b/src/org/torproject/ernie/db/DirreqStatsFileHandler.java
index 3153fd9..2ab65c9 100644
--- a/src/org/torproject/ernie/db/DirreqStatsFileHandler.java
+++ b/src/org/torproject/ernie/db/DirreqStatsFileHandler.java
@@ -3,6 +3,7 @@
 package org.torproject.ernie.db;
 
 import java.io.*;
+import java.sql.*;
 import java.text.*;
 import java.util.*;
 import java.util.logging.*;
@@ -46,11 +47,15 @@ public class DirreqStatsFileHandler {
 
   private int addedResults = 0;
 
+  /* Database connection string. */
+  private String connectionURL = null;
+
   /**
    * Initializes this class, including reading in previous results from
    * <code>stats/dirreq-stats</code>.
    */
-  public DirreqStatsFileHandler(SortedSet<String> countries) {
+  public DirreqStatsFileHandler(SortedSet<String> countries,
+      String connectionURL) {
 
     /* Memorize the set of countries we care about. */
     this.countries = countries;
@@ -62,6 +67,9 @@ public class DirreqStatsFileHandler {
     /* Initialize file name for observations file. */
     this.dirreqStatsFile = new File("stats/dirreq-stats");
 
+    /* Initialize database connection string. */
+    this.connectionURL = connectionURL;
+
     /* Initialize logger. */
     this.logger = Logger.getLogger(
         DirreqStatsFileHandler.class.getName());
@@ -204,7 +212,7 @@ public class DirreqStatsFileHandler {
               > lastDateMillis) {
             lastDateMillis += 24L * 60L * 60L * 1000L;
             bw.append(currentDirectory + ","
-                + dateFormat.format(new Date(lastDateMillis)));
+                + dateFormat.format(lastDateMillis));
             for (int i = 0; i < this.countries.size(); i++) {
               bw.append(",NA");
             }
@@ -231,6 +239,95 @@ public class DirreqStatsFileHandler {
           + "nothing has changed.");
     }
 
+    /* Add directory requests by country to database. */
+    if (connectionURL != null) {
+      try {
+        List<String> countryList = new ArrayList<String>();
+        for (String c : this.countries) {
+          countryList.add(c);
+        }
+        Map<String, String> insertRows = new HashMap<String, String>(),
+            updateRows = new HashMap<String, String>();
+        for (String dirreq : this.dirreqs.values()) {
+          String[] parts = dirreq.split(",");
+          String directory = parts[0];
+          String date = parts[1];
+          String share = parts[parts.length - 1];
+          for (int i = 2; i < parts.length - 1; i++) {
+            String country = countryList.get(i - 2);
+            String key = directory + "," + date + "," + country;
+            String requests = parts[i];
+            String value = requests + "," + share;
+            insertRows.put(key, value);
+          }
+        }
+        Connection conn = DriverManager.getConnection(connectionURL);
+        conn.setAutoCommit(false);
+        Statement statement = conn.createStatement();
+        ResultSet rs = statement.executeQuery(
+            "SELECT source, date, country, requests, share "
+            + "FROM dirreq_stats");
+        while (rs.next()) {
+          String source = rs.getString(1);
+          String date = rs.getDate(2).toString();
+          String country = rs.getString(3);
+          String key = source + "," + date + "," + country;
+          if (insertRows.containsKey(key)) {
+            String insertRow = insertRows.remove(key);
+            long oldUsers = rs.getLong(4);
+            long newUsers = Long.parseLong(insertRow.split(",")[0]);
+            if (oldUsers != newUsers) {
+              updateRows.put(key, insertRow);
+            }
+          }
+        }
+        rs.close();
+        PreparedStatement psU = conn.prepareStatement(
+            "UPDATE dirreq_stats SET requests = ?, share = ? "
+            + "WHERE source = ? AND date = ? AND country = ?");
+        for (Map.Entry<String, String> e : updateRows.entrySet()) {
+          String[] keyParts = e.getKey().split(",");
+          String[] valueParts = e.getValue().split(",");
+          String source = keyParts[0];
+          java.sql.Date date = java.sql.Date.valueOf(keyParts[1]);
+          String country = keyParts[2];
+          long requests = Long.parseLong(valueParts[0]);
+          double share = Double.parseDouble(valueParts[1]);
+          psU.clearParameters();
+          psU.setLong(1, requests);
+          psU.setDouble(2, share);
+          psU.setString(3, source);
+          psU.setDate(4, date);
+          psU.setString(5, country);
+          psU.executeUpdate();
+        }
+        PreparedStatement psI = conn.prepareStatement(
+            "INSERT INTO dirreq_stats (requests, share, source, date, "
+            + "country) VALUES (?, ?, ?, ?, ?)");
+        for (Map.Entry<String, String> e : insertRows.entrySet()) {
+          String[] keyParts = e.getKey().split(",");
+          String[] valueParts = e.getValue().split(",");
+          String source = keyParts[0];
+          java.sql.Date date = java.sql.Date.valueOf(keyParts[1]);
+          String country = keyParts[2];
+          long requests = Long.parseLong(valueParts[0]);
+          double share = Double.parseDouble(valueParts[1]);
+          psI.clearParameters();
+          psI.setLong(1, requests);
+          psI.setDouble(2, share);
+          psI.setString(3, source);
+          psI.setDate(4, date);
+          psI.setString(5, country);
+          psI.executeUpdate();
+        }
+        conn.commit();
+        conn.close();
+      } catch (SQLException e) {
+        logger.log(Level.WARNING, "Failed to add directory requests by "
+            + "country to database.", e);
+      }
+    }
+
     /* Set modification flag to false again. */
     this.dirreqsModified = false;
 
diff --git a/src/org/torproject/ernie/db/GetTorProcessor.java b/src/org/torproject/ernie/db/GetTorProcessor.java
index 0425866..ed1b7ff 100644
--- a/src/org/torproject/ernie/db/GetTorProcessor.java
+++ b/src/org/torproject/ernie/db/GetTorProcessor.java
@@ -4,11 +4,12 @@ package org.torproject.ernie.db;
 
 import java.io.*;
 import java.net.*;
+import java.sql.*;
 import java.util.*;
 import java.util.logging.*;
 
 public class GetTorProcessor {
-  public GetTorProcessor(String gettorStatsUrl) {
+  public GetTorProcessor(String gettorStatsUrl, String connectionURL) {
     Logger logger = Logger.getLogger(TorperfProcessor.class.getName());
     String unparsed = null;
     try {
@@ -26,10 +27,10 @@ public class GetTorProcessor {
         byte[] data = new byte[1024];
         while ((len = in.read(data, 0, 1024)) >= 0) {
           sb.append(new String(data, 0, len));
-        }   
+        }
         in.close();
         unparsed = sb.toString();
-      }   
+      }
       logger.fine("Finished downloading gettor stats.");
     } catch (IOException e) {
       logger.log(Level.WARNING, "Failed downloading gettor stats", e);
@@ -88,6 +89,76 @@ public class GetTorProcessor {
           + statsFile.getAbsolutePath() + "!", e);
     }
 
+    /* Write results to database. */
+    if (connectionURL != null) {
+      try {
+        Map<String, Integer> updateRows = new HashMap<String, Integer>(),
+            insertRows = new HashMap<String, Integer>();
+        for (Map.Entry<String, Map<String, Integer>> e :
+            data.entrySet()) {
+          String date = e.getKey();
+          Map<String, Integer> obs = e.getValue();
+          for (String column : columns) {
+            if (obs.containsKey(column)) {
+              Integer value = obs.get(column);
+              String key = date + "," + column;
+              insertRows.put(key, value);
+            }
+          }
+        }
+        Connection conn = DriverManager.getConnection(connectionURL);
+        PreparedStatement psI = conn.prepareStatement(
+            "INSERT INTO gettor_stats (downloads, date, bundle) "
+            + "VALUES (?, ?, ?)");
+        PreparedStatement psU = conn.prepareStatement(
+            "UPDATE gettor_stats SET downloads = ? "
+            + "WHERE date = ? AND bundle = ?");
+        conn.setAutoCommit(false);
+        Statement statement = conn.createStatement();
+        ResultSet rs = statement.executeQuery(
+            "SELECT date, bundle, downloads FROM gettor_stats");
+        while (rs.next()) {
+          String date = rs.getDate(1).toString();
+          String bundle = rs.getString(2);
+          String key = date + "," + bundle;
+          if (insertRows.containsKey(key)) {
+            int insertRow = insertRows.remove(key);
+            int oldCount = rs.getInt(3);
+            if (insertRow != oldCount) {
+              updateRows.put(key, insertRow);
+            }
+          }
+        }
+        for (Map.Entry<String, Integer> e : updateRows.entrySet()) {
+          String[] keyParts = e.getKey().split(",");
+          java.sql.Date date = java.sql.Date.valueOf(keyParts[0]);
+          String bundle = keyParts[1];
+          int downloads = e.getValue();
+          psU.clearParameters();
+          psU.setLong(1, downloads);
+          psU.setDate(2, date);
+          psU.setString(3, bundle);
+          psU.executeUpdate();
+        }
+        for (Map.Entry<String, Integer> e : insertRows.entrySet()) {
+          String[] keyParts = e.getKey().split(",");
+          java.sql.Date date = java.sql.Date.valueOf(keyParts[0]);
+          String bundle = keyParts[1];
+          int downloads = e.getValue();
+          psI.clearParameters();
+          psI.setLong(1, downloads);
+          psI.setDate(2, date);
+          psI.setString(3, bundle);
+          psI.executeUpdate();
+        }
+        conn.commit();
+        conn.close();
+      } catch (SQLException e) {
+        logger.log(Level.WARNING, "Failed to add GetTor stats to "
+            + "database.", e);
+      }
+    }
+
     logger.info("Finished downloading and processing statistics on Tor "
         + "packages delivered by GetTor.\nDownloaded " + unparsed.length()
         + " bytes. Last date in statistics is " + data.lastKey() + ".");
diff --git a/src/org/torproject/ernie/db/Main.java b/src/org/torproject/ernie/db/Main.java
index dd51acb..207c594 100644
--- a/src/org/torproject/ernie/db/Main.java
+++ b/src/org/torproject/ernie/db/Main.java
@@ -36,11 +36,17 @@ public class Main {
 
     // Prepare stats file handlers (only if we are writing stats)
     ConsensusStatsFileHandler csfh = config.getWriteConsensusStats() ?
-        new ConsensusStatsFileHandler() : null;
+        new ConsensusStatsFileHandler(
+        config.getWriteAggregateStatsDatabase() ?
+        config.getRelayDescriptorDatabaseJDBC() : null) : null;
     BridgeStatsFileHandler bsfh = config.getWriteBridgeStats() ?
-        new BridgeStatsFileHandler(countries) : null;
+        new BridgeStatsFileHandler(countries,
+        config.getWriteAggregateStatsDatabase() ?
+        config.getRelayDescriptorDatabaseJDBC() : null) : null;
     DirreqStatsFileHandler dsfh = config.getWriteDirreqStats() ?
-        new DirreqStatsFileHandler(countries) : null;
+        new DirreqStatsFileHandler(countries,
+        config.getWriteAggregateStatsDatabase() ?
+        config.getRelayDescriptorDatabaseJDBC() : null) : null;
     ServerDescriptorStatsFileHandler sdsfh =
         config.getWriteServerDescriptorStats() ?
         new ServerDescriptorStatsFileHandler(config.getRelayVersions(),
@@ -185,12 +191,16 @@ public class Main {
 
     // Import and process torperf stats
     if (config.getImportWriteTorperfStats()) {
-      new TorperfProcessor(config.getTorperfDirectory());
+      new TorperfProcessor(config.getTorperfDirectory(),
+          config.getWriteAggregateStatsDatabase() ?
+          config.getRelayDescriptorDatabaseJDBC() : null);
     }
 
     // Download and process GetTor stats
     if (config.getDownloadProcessGetTorStats()) {
-      new GetTorProcessor(config.getGetTorStatsUrl());
+      new GetTorProcessor(config.getGetTorStatsUrl(),
+          config.getWriteAggregateStatsDatabase() ?
+          config.getRelayDescriptorDatabaseJDBC() : null);
     }
 
     // Download exit list and store it to disk
diff --git a/src/org/torproject/ernie/db/TorperfProcessor.java b/src/org/torproject/ernie/db/TorperfProcessor.java
index 6b4f633..399a6f3 100644
--- a/src/org/torproject/ernie/db/TorperfProcessor.java
+++ b/src/org/torproject/ernie/db/TorperfProcessor.java
@@ -3,12 +3,13 @@
 package org.torproject.ernie.db;
 
 import java.io.*;
+import java.sql.*;
 import java.text.*;
 import java.util.*;
 import java.util.logging.*;
 
 public class TorperfProcessor {
-  public TorperfProcessor(String torperfDirectory) {
+  public TorperfProcessor(String torperfDirectory, String connectionURL) {
     Logger logger = Logger.getLogger(TorperfProcessor.class.getName());
     File rawFile = new File("stats/torperf-raw");
     File statsFile = new File("stats/torperf-stats");
@@ -89,8 +90,7 @@ public class TorperfProcessor {
                   && !parts[16].equals("0")
                   && Long.parseLong(parts[19]) > receivedBytes) {
                 long startSec = Long.parseLong(parts[0]);
-                String dateTime = formatter.format(
-                    new Date(startSec * 1000L));
+                String dateTime = formatter.format(startSec * 1000L);
                 long completeMillis = Long.parseLong(parts[16])
                     * 1000L + Long.parseLong(parts[17]) / 1000L
                     - Long.parseLong(parts[0]) * 1000L
@@ -194,6 +194,79 @@ public class TorperfProcessor {
           + " " + lastLine.split(",")[2]);
     }
     logger.info(dumpStats.toString());
+
+    /* Write results to database. */
+    if (connectionURL != null) {
+      try {
+        Map<String, String> insertRows = new HashMap<String, String>();
+        insertRows.putAll(stats);
+        Set<String> updateRows = new HashSet<String>();
+        Connection conn = DriverManager.getConnection(connectionURL);
+        conn.setAutoCommit(false);
+        Statement statement = conn.createStatement();
+        ResultSet rs = statement.executeQuery(
+            "SELECT date, source, q1, md, q3 FROM torperf_stats");
+        while (rs.next()) {
+          String date = rs.getDate(1).toString();
+          String source = rs.getString(2);
+          String key = source + "," + date;
+          if (insertRows.containsKey(key)) {
+            String insertRow = insertRows.remove(key);
+            String[] newStats = insertRow.split(",");
+            long newQ1 = Long.parseLong(newStats[2]);
+            long newMd = Long.parseLong(newStats[3]);
+            long newQ3 = Long.parseLong(newStats[4]);
+            long oldQ1 = rs.getLong(3);
+            long oldMd = rs.getLong(4);
+            long oldQ3 = rs.getLong(5);
+            if (newQ1 != oldQ1 || newMd != oldMd || newQ3 != oldQ3) {
+              updateRows.add(insertRow);
+            }
+          }
+        }
+        PreparedStatement psU = conn.prepareStatement(
+            "UPDATE torperf_stats SET q1 = ?, md = ?, q3 = ? "
+            + "WHERE date = ? AND source = ?");
+        for (String row : updateRows) {
+          String[] newStats = row.split(",");
+          String source = newStats[0];
+          java.sql.Date date = java.sql.Date.valueOf(newStats[1]);
+          long q1 = Long.parseLong(newStats[2]);
+          long md = Long.parseLong(newStats[3]);
+          long q3 = Long.parseLong(newStats[4]);
+          psU.clearParameters();
+          psU.setLong(1, q1);
+          psU.setLong(2, md);
+          psU.setLong(3, q3);
+          psU.setDate(4, date);
+          psU.setString(5, source);
+          psU.executeUpdate();
+        }
+        PreparedStatement psI = conn.prepareStatement(
+            "INSERT INTO torperf_stats (q1, md, q3, date, source) "
+            + "VALUES (?, ?, ?, ?, ?)");
+        for (String row : insertRows.values()) {
+          String[] newStats = row.split(",");
+          String source = newStats[0];
+          java.sql.Date date = java.sql.Date.valueOf(newStats[1]);
+          long q1 = Long.parseLong(newStats[2]);
+          long md = Long.parseLong(newStats[3]);
+          long q3 = Long.parseLong(newStats[4]);
+          psI.clearParameters();
+          psI.setLong(1, q1);
+          psI.setLong(2, md);
+          psI.setLong(3, q3);
+          psI.setDate(4, date);
+          psI.setString(5, source);
+          psI.executeUpdate();
+        }
+        conn.commit();
+        conn.close();
+      } catch (SQLException e) {
+        logger.log(Level.WARNING, "Failed to add torperf stats to "
+            + "database.", e);
+      }
+    }
   }
 }
 
-- 
1.7.1