
[tor-commits] [ooni-probe/master] Move all nettests from data/ directory to the ooni/nettests directory.



commit 30ad9e2abde45ed3946d7b00cf1e7939f7db1cfd
Author: Arturo Filastò <art@xxxxxxxxx>
Date:   Wed Sep 11 17:09:47 2013 +0200

    Move all nettests from data/ directory to the ooni/nettests directory.
---
 data/nettests/blocking/__init__.py                 |    1 -
 data/nettests/blocking/dnsconsistency.py           |  175 -----
 data/nettests/blocking/http_requests.py            |  129 ----
 data/nettests/blocking/tcpconnect.py               |   69 --
 .../experimental/bridge_reachability/bridget.py    |  462 -----------
 .../experimental/bridge_reachability/echo.py       |  132 ----
 data/nettests/experimental/chinatrigger.py         |  108 ---
 data/nettests/experimental/dns_injection.py        |   63 --
 data/nettests/experimental/domclass_collector.py   |   33 -
 .../experimental/http_filtering_bypassing.py       |   84 --
 .../experimental/http_keyword_filtering.py         |   45 --
 data/nettests/experimental/http_trix.py            |   47 --
 .../experimental/http_uk_mobile_networks.py        |   85 --
 data/nettests/experimental/keyword_filtering.py    |   52 --
 data/nettests/experimental/parasitictraceroute.py  |  129 ----
 data/nettests/experimental/script.py               |   90 ---
 data/nettests/experimental/squid.py                |  117 ---
 data/nettests/experimental/tls_handshake.py        |  809 --------------------
 data/nettests/manipulation/captiveportal.py        |  650 ----------------
 data/nettests/manipulation/daphne.py               |  119 ---
 data/nettests/manipulation/dnsspoof.py             |   70 --
 .../manipulation/http_header_field_manipulation.py |  190 -----
 data/nettests/manipulation/http_host.py            |  152 ----
 .../manipulation/http_invalid_request_line.py      |  108 ---
 data/nettests/manipulation/traceroute.py           |  144 ----
 data/nettests/scanning/http_url_list.py            |   98 ---
 data/nettests/third_party/Makefile                 |    3 -
 data/nettests/third_party/README                   |   14 -
 data/nettests/third_party/netalyzr.py              |   58 --
 ooni/nettests/blocking/__init__.py                 |    1 +
 ooni/nettests/blocking/dnsconsistency.py           |  175 +++++
 ooni/nettests/blocking/http_requests.py            |  129 ++++
 ooni/nettests/blocking/tcpconnect.py               |   69 ++
 .../experimental/bridge_reachability/bridget.py    |  462 +++++++++++
 .../experimental/bridge_reachability/echo.py       |  132 ++++
 ooni/nettests/experimental/chinatrigger.py         |  108 +++
 ooni/nettests/experimental/dns_injection.py        |   63 ++
 ooni/nettests/experimental/domclass_collector.py   |   33 +
 .../experimental/http_filtering_bypassing.py       |   84 ++
 .../experimental/http_keyword_filtering.py         |   45 ++
 ooni/nettests/experimental/http_trix.py            |   47 ++
 .../experimental/http_uk_mobile_networks.py        |   85 ++
 ooni/nettests/experimental/keyword_filtering.py    |   52 ++
 ooni/nettests/experimental/parasitictraceroute.py  |  129 ++++
 ooni/nettests/experimental/script.py               |   90 +++
 ooni/nettests/experimental/squid.py                |  117 +++
 ooni/nettests/experimental/tls_handshake.py        |  809 ++++++++++++++++++++
 ooni/nettests/manipulation/captiveportal.py        |  650 ++++++++++++++++
 ooni/nettests/manipulation/daphne.py               |  119 +++
 ooni/nettests/manipulation/dnsspoof.py             |   70 ++
 .../manipulation/http_header_field_manipulation.py |  190 +++++
 ooni/nettests/manipulation/http_host.py            |  152 ++++
 .../manipulation/http_invalid_request_line.py      |  108 +++
 ooni/nettests/manipulation/traceroute.py           |  144 ++++
 ooni/nettests/scanning/http_url_list.py            |   98 +++
 ooni/nettests/third_party/Makefile                 |    3 +
 ooni/nettests/third_party/README                   |   14 +
 ooni/nettests/third_party/netalyzr.py              |   58 ++
 58 files changed, 4236 insertions(+), 4236 deletions(-)

diff --git a/data/nettests/__init__.py b/data/nettests/__init__.py
deleted file mode 100644
index e69de29..0000000
diff --git a/data/nettests/blocking/__init__.py b/data/nettests/blocking/__init__.py
deleted file mode 100644
index 8b13789..0000000
--- a/data/nettests/blocking/__init__.py
+++ /dev/null
@@ -1 +0,0 @@
-
diff --git a/data/nettests/blocking/dnsconsistency.py b/data/nettests/blocking/dnsconsistency.py
deleted file mode 100644
index 3c88cd2..0000000
--- a/data/nettests/blocking/dnsconsistency.py
+++ /dev/null
@@ -1,175 +0,0 @@
-# -*- encoding: utf-8 -*-
-#
-#  dnsconsistency
-#  **************
-#
-#  The test reports censorship if the cardinality of the intersection of
-#  the query result set from the control server and the query result set
-#  from the experimental server is zero, which is to say, if the two sets
-#  have no matching results whatsoever.
-#
-#  NOTE: This test frequently results in false positives due to GeoIP-based
-#  load balancing on major global sites such as Google, Facebook, and
-#  YouTube.
-#
-# :authors: Arturo Filastò, Isis Lovecruft
-# :licence: see LICENSE
-
-import pdb
-
-from twisted.python import usage
-from twisted.internet import defer
-
-from ooni.templates import dnst
-
-from ooni import nettest
-from ooni.utils import log
-
-class UsageOptions(usage.Options):
-    optParameters = [['backend', 'b', '8.8.8.8:53',
-                        'The OONI backend that runs the DNS resolver'],
-                     ['testresolvers', 'T', None,
-                        'File containing list of DNS resolvers to test against'],
-                     ['testresolver', 't', None,
-                         'Specify a single test resolver to use for testing']
-                    ]
-
-class DNSConsistencyTest(dnst.DNSTest):
-
-    name = "DNS Consistency"
-    description = "DNS censorship detection test"
-    version = "0.6"
-    authors = "Arturo Filastò, Isis Lovecruft"
-    requirements = None
-
-    inputFile = ['file', 'f', None,
-                 'Input file of list of hostnames to attempt to resolve']
-    
-    requiredTestHelpers = {'backend': 'dns'}
-
-    usageOptions = UsageOptions
-    requiredOptions = ['backend', 'file']
-
-    def setUp(self):
-        if (not self.localOptions['testresolvers'] and \
-                not self.localOptions['testresolver']):
-            raise usage.UsageError("You did not specify a testresolver")
-
-        elif self.localOptions['testresolvers']:
-            test_resolvers_file = self.localOptions['testresolvers']
-
-        elif self.localOptions['testresolver']:
-            self.test_resolvers = [self.localOptions['testresolver']]
-
-        try:
-            with open(test_resolvers_file) as f:
-                self.test_resolvers = [x.split('#')[0].strip() for x in f.readlines()]
-                self.report['test_resolvers'] = self.test_resolvers
-            f.close()
-
-        except IOError, e:
-            log.exception(e)
-            raise usage.UsageError("Invalid test resolvers file")
-
-        except NameError:
-            log.debug("No test resolver file configured")
-
-        dns_ip, dns_port = self.localOptions['backend'].split(':')
-        self.control_dns_server = (dns_ip, int(dns_port))
-
-        self.report['control_resolver'] = self.control_dns_server
-
-    @defer.inlineCallbacks
-    def test_a_lookup(self):
-        """
-        We perform an A lookup on the DNS test servers for the domains to be
-        tested and an A lookup on the known good DNS server.
-
-        We then compare the results from the test_resolvers with those from the
-        control_resolver and see whether they match up.
-        If they match up then no censorship is happening (tampering: false).
-
-        If they do not, we do a reverse (PTR) lookup on the test_resolvers and
-        the control resolver for every IP address we got back and check to see
-        whether any of them matches the control ones.
-
-        If they do, then we take note of the fact that censorship is probably not
-        happening (tampering: reverse_match).
-
-        If they do not match then censorship is probably going on (tampering:
-        true).
-        """
-        log.msg("Doing the test lookups on %s" % self.input)
-        list_of_ds = []
-        hostname = self.input
-
-        self.report['tampering'] = {}
-
-        control_answers = yield self.performALookup(hostname, self.control_dns_server)
-        if not control_answers:
-                log.err("Got no response from control DNS server %s," \
-                        " perhaps the DNS resolver is down?" % self.control_dns_server[0])
-                self.report['tampering'][self.control_dns_server] = 'no_answer'
-                return
-
-        for test_resolver in self.test_resolvers:
-            log.msg("Testing resolver: %s" % test_resolver)
-            test_dns_server = (test_resolver, 53)
-
-            try:
-                experiment_answers = yield self.performALookup(hostname, test_dns_server)
-            except Exception, e:
-                log.err("Problem performing the DNS lookup")
-                log.exception(e)
-                self.report['tampering'][test_resolver] = 'dns_lookup_error'
-                continue
-
-            if not experiment_answers:
-                log.err("Got no response, perhaps the DNS resolver is down?")
-                self.report['tampering'][test_resolver] = 'no_answer'
-                continue
-            else:
-                log.debug("Got the following A lookup answers %s from %s" % (experiment_answers, test_resolver))
-
-            def lookup_details():
-                """
-                A closure useful for printing test details.
-                """
-                log.msg("test resolver: %s" % test_resolver)
-                log.msg("experiment answers: %s" % experiment_answers)
-                log.msg("control answers: %s" % control_answers)
-
-            log.debug("Comparing %s with %s" % (experiment_answers, control_answers))
-            if set(experiment_answers) & set(control_answers):
-                lookup_details()
-                log.msg("tampering: false")
-                self.report['tampering'][test_resolver] = False
-            else:
-                log.msg("Trying to do reverse lookup")
-
-                experiment_reverse = yield self.performPTRLookup(experiment_answers[0], test_dns_server)
-                control_reverse = yield self.performPTRLookup(control_answers[0], self.control_dns_server)
-
-                if experiment_reverse == control_reverse:
-                    log.msg("Further testing has eliminated false positives")
-                    lookup_details()
-                    log.msg("tampering: reverse_match")
-                    self.report['tampering'][test_resolver] = 'reverse_match'
-                else:
-                    log.msg("Reverse lookups do not match")
-                    lookup_details()
-                    log.msg("tampering: true")
-                    self.report['tampering'][test_resolver] = True
-
-    def inputProcessor(self, filename=None):
-        """
-        This inputProcessor extracts domain names from urls
-        """
-        log.debug("Running dnsconsistency default processor")
-        if filename:
-            fp = open(filename)
-            for x in fp.readlines():
-                yield x.strip().split('//')[-1].split('/')[0]
-            fp.close()
-        else:
-            pass
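
The verdict logic above reduces to a set intersection with a PTR-lookup
fallback. A minimal standalone sketch of that decision (hypothetical helper
name, no Twisted machinery):

    def classify_tampering(control_answers, experiment_answers,
                           control_reverse=None, experiment_reverse=None):
        # False           -> the A-record sets overlap; no censorship suspected
        # 'reverse_match' -> disjoint A records, but PTR records agree (e.g. GeoIP)
        # True            -> disjoint A records and differing PTR records
        if set(experiment_answers) & set(control_answers):
            return False
        if experiment_reverse is not None and experiment_reverse == control_reverse:
            return 'reverse_match'
        return True

    # Disjoint answers with matching reverse records are not flagged as censorship:
    classify_tampering(['1.2.3.4'], ['5.6.7.8'],
                       control_reverse='cdn.example.com',
                       experiment_reverse='cdn.example.com')  # -> 'reverse_match'
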
diff --git a/data/nettests/blocking/http_requests.py b/data/nettests/blocking/http_requests.py
deleted file mode 100644
index 8c74762..0000000
--- a/data/nettests/blocking/http_requests.py
+++ /dev/null
@@ -1,129 +0,0 @@
-# -*- encoding: utf-8 -*-
-#
-# :authors: Arturo Filastò
-# :licence: see LICENSE
-
-import random
-from twisted.internet import defer
-from twisted.python import usage
-
-from ooni.utils import log
-from ooni.utils.net import userAgents
-from ooni.templates import httpt
-from ooni.errors import failureToString, handleAllFailures
-
-class UsageOptions(usage.Options):
-    optParameters = [
-                     ['url', 'u', None, 'Specify a single URL to test.'],
-                     ['factor', 'f', 0.8, 'What factor should be used for triggering censorship (0.8 == 80%)']
-                    ]
-
-class HTTPRequestsTest(httpt.HTTPTest):
-    """
-    Performs two GET requests to each site to be tested for
-    censorship: one over a known good control channel (Tor), the other over the
-    test network.
-
-    We check to see if the response headers match and if the response body
-    lengths match.
-    """
-    name = "HTTP Requests Test"
-    author = "Arturo Filastò"
-    version = "0.2.3"
-
-    usageOptions = UsageOptions
-
-    inputFile = ['file', 'f', None,
-            'List of URLS to perform GET and POST requests to']
-
-    # These values are used for determining censorship based on response body
-    # lengths
-    control_body_length = None
-    experiment_body_length = None
-
-    def setUp(self):
-        """
-        Check for inputs.
-        """
-        if self.input:
-            self.url = self.input
-        elif self.localOptions['url']:
-            self.url = self.localOptions['url']
-        else:
-            raise Exception("No input specified")
-
-        self.factor = self.localOptions['factor']
-        self.report['control_failure'] = None
-        self.report['experiment_failure'] = None
-
-    def compare_body_lengths(self, body_length_a, body_length_b):
-
-        if body_length_b == 0 and body_length_a != 0:
-            rel = float(body_length_b)/float(body_length_a)
-        elif body_length_b == 0 and body_length_a == 0:
-            rel = float(1)
-        else:
-            rel = float(body_length_a)/float(body_length_b)
-
-        if rel > 1:
-            rel = 1/rel
-
-        self.report['body_proportion'] = rel
-        self.report['factor'] = float(self.factor)
-        if rel > float(self.factor):
-            log.msg("The two body lengths appear to match")
-            log.msg("censorship is probably not happening")
-            self.report['body_length_match'] = True
-        else:
-            log.msg("The two body lengths appear to not match")
-            log.msg("censorship could be happening")
-            self.report['body_length_match'] = False
-
-    def compare_headers(self, headers_a, headers_b):
-        diff = headers_a.getDiff(headers_b)
-        if diff:
-            log.msg("Headers appear to *not* match")
-            self.report['headers_diff'] = diff
-            self.report['headers_match'] = False
-        else:
-            log.msg("Headers appear to match")
-            self.report['headers_diff'] = diff
-            self.report['headers_match'] = True
-
-    def test_get(self):
-        def callback(res):
-            experiment, control = res
-            experiment_succeeded, experiment_result = experiment
-            control_succeeded, control_result = control
-
-            if control_succeeded and experiment_succeeded:
-                self.compare_body_lengths(len(experiment_result.body),
-                        len(control_result.body))
-
-                self.compare_headers(control_result.headers,
-                        experiment_result.headers)
-
-            if not control_succeeded:
-                self.report['control_failure'] = failureToString(control_result)
-
-            if not experiment_succeeded:
-                self.report['experiment_failure'] = failureToString(experiment_result)
-
-        headers = {'User-Agent': [random.choice(userAgents)]}
-
-        l = []
-        log.msg("Performing GET request to %s" % self.url)
-        experiment_request = self.doRequest(self.url, method="GET",
-                headers=headers)
-
-        control_request = self.doRequest(self.url, method="GET",
-                use_tor=True, headers=headers)
-
-        l.append(experiment_request)
-        l.append(control_request)
-
-        dl = defer.DeferredList(l, consumeErrors=True)
-        dl.addCallback(callback)
-
-        return dl
-
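
The body-length comparison above is a proportion check against the
configurable factor (default 0.8). The same arithmetic as a standalone
sketch (hypothetical function name):

    def body_lengths_match(len_a, len_b, factor=0.8):
        # Proportion of the two body lengths, normalised into the 0..1 range.
        if len_a == 0 and len_b == 0:
            rel = 1.0
        elif len_b == 0:
            rel = float(len_b) / float(len_a)   # 0.0, as in compare_body_lengths
        else:
            rel = float(len_a) / float(len_b)
        if rel > 1:
            rel = 1 / rel
        return rel > factor

    body_lengths_match(950, 1000)    # True:  lengths considered matching
    body_lengths_match(100, 1000)    # False: possible block page
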
diff --git a/data/nettests/blocking/tcpconnect.py b/data/nettests/blocking/tcpconnect.py
deleted file mode 100644
index 5b432e0..0000000
--- a/data/nettests/blocking/tcpconnect.py
+++ /dev/null
@@ -1,69 +0,0 @@
-# -*- encoding: utf-8 -*-
-from twisted.internet.protocol import Factory, Protocol
-from twisted.internet.endpoints import TCP4ClientEndpoint
-
-from twisted.internet.error import ConnectionRefusedError
-from twisted.internet.error import TCPTimedOutError, TimeoutError
-
-from ooni import nettest
-from ooni.errors import handleAllFailures
-from ooni.utils import log
-
-class TCPFactory(Factory):
-    def buildProtocol(self, addr):
-        return Protocol()
-
-class TCPConnectTest(nettest.NetTestCase):
-    name = "TCP Connect"
-    author = "Arturo Filastò"
-    version = "0.1"
-    inputFile = ['file', 'f', None,
-            'File containing the IP:PORT combinations to be tested, one per line']
-
-    requiredOptions = ['file']
-    def test_connect(self):
-        """
-        This test performs a TCP connection to the remote host on the specified port.
-        The report will contain the string 'success' if the test has
-        succeeded, or the reason for the failure if it has failed.
-        """
-        host, port = self.input.split(":")
-        def connectionSuccess(protocol):
-            protocol.transport.loseConnection()
-            log.debug("Got a connection to %s" % self.input)
-            self.report["connection"] = 'success'
-
-        def connectionFailed(failure):
-            self.report['connection'] = handleAllFailures(failure)
-
-        from twisted.internet import reactor
-        point = TCP4ClientEndpoint(reactor, host, int(port))
-        d = point.connect(TCPFactory())
-        d.addCallback(connectionSuccess)
-        d.addErrback(connectionFailed)
-        return d
-
-    def inputProcessor(self, filename=None):
-        """
-        This inputProcessor extracts host:port pairs from URLs
-        XXX: Does not support unusual port numbers
-        """
-        def strip_url(address):
-            proto, path = address.strip().split('://')
-            proto = proto.lower()
-            host = path.split('/')[0]
-            if proto == 'http':
-                return "%s:80" % host
-            if proto == 'https':
-                return "%s:443" % host
-
-        if filename:
-            fp = open(filename)
-            for x in fp.readlines():
-                if x.startswith("http"):
-                    yield strip_url(x)
-                else:
-                    yield x.strip()
-            fp.close()
-        else:
-            pass
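
The inputProcessor above normalises its inputs to host:port endpoints. A
small standalone sketch of the same mapping (it shares the limitation noted
in the XXX comment: URLs carrying explicit non-default ports are not handled):

    def to_endpoint(line):
        line = line.strip()
        if not line.startswith("http"):
            return line                          # already an IP:PORT pair
        proto, rest = line.split('://', 1)
        host = rest.split('/')[0]
        port = 443 if proto.lower() == 'https' else 80
        return "%s:%d" % (host, port)

    to_endpoint("https://example.com/index.html")   # 'example.com:443'
    to_endpoint("10.0.0.1:8080")                     # '10.0.0.1:8080'
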
diff --git a/data/nettests/experimental/__init__.py b/data/nettests/experimental/__init__.py
deleted file mode 100644
index e69de29..0000000
diff --git a/data/nettests/experimental/bridge_reachability/bridget.py b/data/nettests/experimental/bridge_reachability/bridget.py
deleted file mode 100644
index acf3dff..0000000
--- a/data/nettests/experimental/bridge_reachability/bridget.py
+++ /dev/null
@@ -1,462 +0,0 @@
-#!/usr/bin/env python
-# -*- encoding: utf-8 -*-
-#
-#  +-----------+
-#  |  BRIDGET  |
-#  |        +--------------------------------------------+
-#  +--------| Use a Tor process to test making a Tor     |
-#           | connection to a list of bridges or relays. |
-#           +--------------------------------------------+
-#
-# :authors: Isis Lovecruft, Arturo Filasto
-# :licence: see included LICENSE
-# :version: 0.1.0-alpha
-
-from __future__           import with_statement
-from functools            import partial
-from random               import randint
-
-import os
-import sys
-
-from twisted.python       import usage
-from twisted.internet     import defer, error, reactor
-
-from ooni                 import nettest
-
-from ooni.utils           import log, date
-from ooni.utils.config    import ValueChecker
-
-from ooni.utils.onion     import TxtorconImportError
-from ooni.utils.onion     import PTNoBridgesException, PTNotFoundException
-
-
-try:
-    from ooni.utils.onion     import parse_data_dir
-except:
-    log.msg("Please go to /ooni/lib and do 'make txtorcon' to run this test!")
-
-class MissingAssetException(Exception):
-    pass
-
-class RandomPortException(Exception):
-    """Raised when using a random port conflicts with configured ports."""
-    def __init__(self):
-        log.msg("Unable to use random and specific ports simultaneously")
-        return sys.exit()
-
-class BridgetArgs(usage.Options):
-    """Commandline options."""
-    allowed = "Port to use for Tor's %s, must be between 1024 and 65535."
-    sock_check = ValueChecker(allowed % "SocksPort").port_check
-    ctrl_check = ValueChecker(allowed % "ControlPort").port_check
-
-    optParameters = [
-        ['bridges', 'b', None,
-         'File listing bridge IP:ORPorts to test'],
-        ['relays', 'f', None,
-         'File listing relay IPs to test'],
-        ['socks', 's', 9049, None, sock_check],
-        ['control', 'c', 9052, None, ctrl_check],
-        ['torpath', 'p', None,
-         'Path to the Tor binary to use'],
-        ['datadir', 'd', None,
-         'Tor DataDirectory to use'],
-        ['transport', 't', None,
-         'Tor ClientTransportPlugin'],
-        ['resume', 'r', 0,
-         'Resume at this index']]
-    optFlags = [['random', 'x', 'Use random ControlPort and SocksPort']]
-
-    def postOptions(self):
-        if not self['bridges'] and not self['relays']:
-            raise MissingAssetException(
-                "Bridget can't run without bridges or relays to test!")
-        if self['transport']:
-            ValueChecker.uid_check(
-                "Can't run bridget as root with pluggable transports!")
-            if not self['bridges']:
-                raise PTNoBridgesException
-        if self['socks'] or self['control']:
-            if self['random']:
-                raise RandomPortException
-        if self['datadir']:
-            ValueChecker.dir_check(self['datadir'])
-        if self['torpath']:
-            ValueChecker.file_check(self['torpath'])
-
-class BridgetTest(nettest.NetTestCase):
-    """
-    XXX fill me in
-
-    :ivar config:
-        An :class:`ooni.lib.txtorcon.TorConfig` instance.
-    :ivar relays:
-        A list of all provided relays to test.
-    :ivar bridges:
-        A list of all provided bridges to test.
-    :ivar socks_port:
-        Integer for Tor's SocksPort.
-    :ivar control_port:
-        Integer for Tor's ControlPort.
-    :ivar transport:
-        String defining the Tor's ClientTransportPlugin, for testing
-        a bridge's pluggable transport functionality.
-    :ivar tor_binary:
-        Path to the Tor binary to use, e.g. \'/usr/sbin/tor\'
-    """
-    name    = "bridget"
-    author  = "Isis Lovecruft <isis@xxxxxxxxxxxxxx>"
-    version = "0.1"
-    description   = "Use a Tor process to test connecting to bridges or relays"
-    usageOptions = BridgetArgs
-
-    def setUp(self):
-        """
-        Extra initialization steps. We only want one child Tor process
-        running, so we need to deal with most of the TorConfig() only once,
-        before the experiment runs.
-        """
-        self.socks_port      = 9049
-        self.control_port    = 9052
-        self.circuit_timeout = 90
-        self.tor_binary      = '/usr/sbin/tor'
-        self.data_directory  = None
-
-        def read_from_file(filename):
-            log.msg("Loading information from %s ..." % opt)
-            with open(filename) as fp:
-                lst = []
-                for line in fp.readlines():
-                    if line.startswith('#'):
-                        continue
-                    else:
-                        lst.append(line.replace('\n',''))
-                return lst
-
-        def __count_remaining__(which):
-            total, reach, unreach = map(lambda x: which[x],
-                                        ['all', 'reachable', 'unreachable'])
-            count = len(total) - reach() - unreach()
-            return count
-
-        ## XXX should we do report['bridges_up'].append(self.bridges['current'])
-        self.bridges = {}
-        self.bridges['all'], self.bridges['up'], self.bridges['down'] = \
-            ([] for i in range(3))
-        self.bridges['reachable']   = lambda: len(self.bridges['up'])
-        self.bridges['unreachable'] = lambda: len(self.bridges['down'])
-        self.bridges['remaining']   = lambda: __count_remaining__(self.bridges)
-        self.bridges['current']     = None
-        self.bridges['pt_type']     = None
-        self.bridges['use_pt']      = False
-
-        self.relays = {}
-        self.relays['all'], self.relays['up'], self.relays['down'] = \
-            ([] for i in range(3))
-        self.relays['reachable']   = lambda: len(self.relays['up'])
-        self.relays['unreachable'] = lambda: len(self.relays['down'])
-        self.relays['remaining']   = lambda: __count_remaining__(self.relays)
-        self.relays['current']     = None
-
-        if self.localOptions:
-            try:
-                from txtorcon import TorConfig
-            except ImportError:
-                raise TxtorconImportError
-            else:
-                self.config = TorConfig()
-            finally:
-                options = self.localOptions
-
-            if options['bridges']:
-                self.config.UseBridges = 1
-                self.bridges['all'] = read_from_file(options['bridges'])
-            if options['relays']:
-                ## first hop must be in TorState().guards
-                # XXX where is this defined?
-                self.config.EntryNodes = ','.join(relay_list)
-                self.relays['all'] = read_from_file(options['relays'])
-            if options['socks']:
-                self.socks_port = options['socks']
-            if options['control']:
-                self.control_port = options['control']
-            if options['random']:
-                log.msg("Using randomized ControlPort and SocksPort ...")
-                self.socks_port   = randint(1024, 65535)
-                self.control_port = randint(1024, 65535)
-            if options['torpath']:
-                self.tor_binary = options['torpath']
-            if options['datadir']:
-                self.data_directory = parse_data_dir(options['datadir'])
-            if options['transport']:
-                ## ClientTransportPlugin transport exec pathtobinary [options]
-                ## XXX we need a better way to deal with all PTs
-                log.msg("Using ClientTransportPlugin %s" % options['transport'])
-                self.bridges['use_pt'] = True
-                [self.bridges['pt_type'], pt_exec] = \
-                    options['transport'].split(' ', 1)
-
-                if self.bridges['pt_type'] == "obfs2":
-                    self.config.ClientTransportPlugin = \
-                        self.bridges['pt_type'] + " " + pt_exec
-                else:
-                    raise PTNotFoundException
-
-            self.config.SocksPort            = self.socks_port
-            self.config.ControlPort          = self.control_port
-            self.config.CookieAuthentication = 1
-
-    def test_bridget(self):
-        """
-        if bridges:
-            1. configure first bridge line
-            2a. configure data_dir, if it doesn't exist
-            2b. write torrc to a tempfile in data_dir
-            3. start tor                              } if any of these
-            4. remove bridges which are public relays } fail, add current
-            5. SIGHUP for each bridge                 } bridge to unreach-
-                                                      } able bridges.
-        if relays:
-            1a. configure the data_dir, if it doesn't exist
-            1b. write torrc to a tempfile in data_dir
-            2. start tor
-            3. remove any of our relays which are already part of current
-               circuits
-            4a. attach CustomCircuit() to self.state
-            4b. RELAY_EXTEND for each relay } if this fails, add
-                                            } current relay to list
-                                            } of unreachable relays
-            5.
-        if bridges and relays:
-            1. configure first bridge line
-            2a. configure data_dir if it doesn't exist
-            2b. write torrc to a tempfile in data_dir
-            3. start tor
-            4. remove bridges which are public relays
-            5. remove any of our relays which are already part of current
-               circuits
-            6a. attach CustomCircuit() to self.state
-            6b. for each bridge, build three circuits, with three
-                relays each
-            6c. RELAY_EXTEND for each relay } if this fails, add
-                                            } current relay to list
-                                            } of unreachable relays
-
-        :param args:
-            The :class:`BridgetAsset` line currently being used. Except that it
-            in Bridget it doesn't, so it should be ignored and avoided.
-        """
-        try:
-            from ooni.utils         import process
-            from ooni.utils.onion   import remove_public_relays, start_tor
-            from ooni.utils.onion   import start_tor_filter_nodes
-            from ooni.utils.onion   import setup_fail, setup_done
-            from ooni.utils.onion   import CustomCircuit
-            from ooni.utils.timer   import deferred_timeout, TimeoutError
-            from ooni.lib.txtorcon  import TorConfig, TorState
-        except ImportError:
-            raise TxtorconImportError
-        except TxtorconImportError, tie:
-            log.err(tie)
-            sys.exit()
-
-        def reconfigure_done(state, bridges):
-            """
-            Append :ivar:`bridges['current']` to the list
-            :ivar:`bridges['up']`.
-            """
-            log.msg("Reconfiguring with 'Bridge %s' successful"
-                    % bridges['current'])
-            bridges['up'].append(bridges['current'])
-            return state
-
-        def reconfigure_fail(state, bridges):
-            """
-            Append :ivar:`bridges['current']` to the list
-            :ivar:`bridges['down']`.
-            """
-            log.msg("Reconfiguring TorConfig with parameters %s failed"
-                    % state)
-            bridges['down'].append(bridges['current'])
-            return state
-
-        @defer.inlineCallbacks
-        def reconfigure_bridge(state, bridges):
-            """
-            Rewrite the Bridge line in our torrc. If use of pluggable
-            transports was specified, rewrite the line as:
-                Bridge <transport_type> <IP>:<ORPort>
-            Otherwise, rewrite in the standard form:
-                Bridge <IP>:<ORPort>
-
-            :param state:
-                A fully bootstrapped instance of
-                :class:`ooni.lib.txtorcon.TorState`.
-            :param bridges:
-                A dictionary of bridges containing the following keys:
-
-                bridges['remaining'] :: A function returning and int for the
-                                        number of remaining bridges to test.
-                bridges['current']   :: A string containing the <IP>:<ORPort>
-                                        of the current bridge.
-                bridges['use_pt']    :: A boolean, True if we're testing
-                                        bridges with a pluggable transport;
-                                        False otherwise.
-                bridges['pt_type']   :: If :ivar:`bridges['use_pt'] is True,
-                                        this is a string containing the type
-                                        of pluggable transport to test.
-            :return:
-                :param:`state`
-            """
-            log.msg("Current Bridge: %s" % bridges['current'])
-            log.msg("We now have %d bridges remaining to test..."
-                    % bridges['remaining']())
-            try:
-                if bridges['use_pt'] is False:
-                    controller_response = yield state.protocol.set_conf(
-                        'Bridge', bridges['current'])
-                elif bridges['use_pt'] and bridges['pt_type'] is not None:
-                    controller_response = yield state.protocol.set_conf(
-                        'Bridge', bridges['pt_type'] +' '+ bridges['current'])
-                else:
-                    raise PTNotFoundException
-
-                if controller_response == 'OK':
-                    finish = yield reconfigure_done(state, bridges)
-                else:
-                    log.err("SETCONF for %s responded with error:\n %s"
-                            % (bridges['current'], controller_response))
-                    finish = yield reconfigure_fail(state, bridges)
-
-                defer.returnValue(finish)
-
-            except Exception, e:
-                log.err("Reconfiguring torrc with Bridge line %s failed:\n%s"
-                        % (bridges['current'], e))
-                defer.returnValue(None)
-
-        def attacher_extend_circuit(attacher, deferred, router):
-            ## XXX todo write me
-            ## state.attacher.extend_circuit
-            raise NotImplementedError
-            #attacher.extend_circuit
-
-        def state_attach(state, path):
-            log.msg("Setting up custom circuit builder...")
-            attacher = CustomCircuit(state)
-            state.set_attacher(attacher, reactor)
-            state.add_circuit_listener(attacher)
-            return state
-
-            ## OLD
-            #for circ in state.circuits.values():
-            #    for relay in circ.path:
-            #        try:
-            #            relay_list.remove(relay)
-            #        except KeyError:
-            #            continue
-            ## XXX how do we attach to circuits with bridges?
-            d = defer.Deferred()
-            attacher.request_circuit_build(d)
-            return d
-
-        def state_attach_fail(state):
-            log.err("Attaching custom circuit builder failed: %s" % state)
-
-        log.msg("Bridget: initiating test ... ")  ## Start the experiment
-
-        ## if we've at least one bridge, and our config has no 'Bridge' line
-        if self.bridges['remaining']() >= 1 \
-                and not 'Bridge' in self.config.config:
-
-            ## configure our first bridge line
-            self.bridges['current'] = self.bridges['all'][0]
-            self.config.Bridge = self.bridges['current']
-                                                  ## avoid starting several
-            self.config.save()                    ## processes
-            assert self.config.config.has_key('Bridge'), "No Bridge Line"
-
-            ## start tor and remove bridges which are public relays
-            from ooni.utils.onion import start_tor_filter_nodes
-            state = start_tor_filter_nodes(reactor, self.config,
-                                           self.control_port, self.tor_binary,
-                                           self.data_directory, self.bridges)
-            #controller = defer.Deferred()
-            #controller.addCallback(singleton_semaphore, tor)
-            #controller.addErrback(setup_fail)
-            #bootstrap = defer.gatherResults([controller, filter_bridges],
-            #                                consumeErrors=True)
-
-            if state is not None:
-                log.debug("state:\n%s" % state)
-                log.debug("Current callbacks on TorState():\n%s"
-                          % state.callbacks)
-
-        ## if we've got more bridges
-        if self.bridges['remaining']() >= 2:
-            #all = []
-            for bridge in self.bridges['all'][1:]:
-                self.bridges['current'] = bridge
-                #new = defer.Deferred()
-                #new.addCallback(reconfigure_bridge, state, self.bridges)
-                #all.append(new)
-            #check_remaining = defer.DeferredList(all, consumeErrors=True)
-            #state.chainDeferred(check_remaining)
-                state.addCallback(reconfigure_bridge, self.bridges)
-
-        if self.relays['remaining']() > 0:
-            while self.relays['remaining']() >= 3:
-                #path = list(self.relays.pop() for i in range(3))
-                #log.msg("Trying path %s" % '->'.join(map(lambda node:
-                #                                         node, path)))
-                self.relays['current'] = self.relays['all'].pop()
-                for circ in state.circuits.values():
-                    for node in circ.path:
-                        if node == self.relays['current']:
-                            self.relays['up'].append(self.relays['current'])
-                    if len(circ.path) < 3:
-                        try:
-                            ext = attacher_extend_circuit(state.attacher, circ,
-                                                          self.relays['current'])
-                            ext.addCallback(attacher_extend_circuit_done,
-                                            state.attacher, circ,
-                                            self.relays['current'])
-                        except Exception, e:
-                            log.err("Extend circuit failed: %s" % e)
-                    else:
-                        continue
-
-        #state.callback(all)
-        #self.reactor.run()
-        return state
-
-    def disabled_startTest(self, args):
-        """
-        Local override of :meth:`OONITest.startTest` to bypass calling
-        self.control.
-
-        :param args:
-            The current line of :class:`Asset`, not used but kept for
-            compatibility reasons.
-        :return:
-            A fired deferred which callbacks :meth:`experiment` and
-            :meth:`OONITest.finished`.
-        """
-        self.start_time = date.now()
-        self.d = self.experiment(args)
-        self.d.addErrback(log.err)
-        self.d.addCallbacks(self.finished, log.err)
-        return self.d
-
-## ISIS' NOTES
-## -----------
-## TODO:
-##       x  cleanup documentation
-##       x  add DataDirectory option
-##       x  check if bridges are public relays
-##       o  take bridge_desc file as input, also be able to give same
-##          format as output
-##       x  Add asynchronous timeout for deferred, so that we don't wait
-##       o  Add asynchronous timeout for deferred, so that we don't wait
-##          forever for bridges that don't work.
diff --git a/data/nettests/experimental/bridge_reachability/echo.py b/data/nettests/experimental/bridge_reachability/echo.py
deleted file mode 100644
index d4033dd..0000000
--- a/data/nettests/experimental/bridge_reachability/echo.py
+++ /dev/null
@@ -1,132 +0,0 @@
-#!/usr/bin/env python
-# -*- coding: utf-8 -*-
-#
-#  +---------+
-#  | echo.py |
-#  +---------+
-#     A simple ICMP-8 ping test.
-#
-# @authors: Isis Lovecruft, <isis@xxxxxxxxxxxxxx>
-# @version: 0.0.2-pre-alpha
-# @license: copyright (c) 2012 Isis Lovecruft
-#           see attached LICENCE file
-#
-
-import os
-import sys
-
-from twisted.python   import usage
-from twisted.internet import reactor, defer
-from ooni             import nettest
-from ooni.utils       import log, net, Storage, txscapy
-
-try:
-    from scapy.all             import IP, ICMP
-    from scapy.all             import sr1
-    from ooni.lib              import txscapy
-    from ooni.lib.txscapy      import txsr, txsend
-    from ooni.templates.scapyt import BaseScapyTest
-except:
-    log.msg("This test requires scapy, see www.secdev.org/projects/scapy")
-
-class UsageOptions(usage.Options):
-    optParameters = [
-        ['dst', 'd', None, 'Host IP to ping'],
-        ['file', 'f', None, 'File of list of IPs to ping'],
-        ['interface', 'i', None, 'Network interface to use'],
-        ['count', 'c', 1, 'Number of packets to send', int],
-        ['size', 's', 56, 'Number of bytes to send in ICMP data field', int],
-        ['ttl', 'l', 25, 'Set the IP Time to Live', int],
-        ['timeout', 't', 2, 'Seconds until timeout if no response', int],
-        ['pcap', 'p', None, 'Save pcap to this file'],
-        ['receive', 'r', True, 'Receive response packets']]
-
-class EchoTest(nettest.NetTestCase):
-    """
-    xxx fill me in
-    """
-    name         = 'echo'
-    author       = 'Isis Lovecruft <isis@xxxxxxxxxxxxxx>'
-    description  = 'A simple ping test to see if a host is reachable.'
-    version      = '0.0.2'
-    requiresRoot = True
-
-    usageOptions    = UsageOptions
-    #requiredOptions = ['dst']
-
-    def setUp(self, *a, **kw):
-        self.destinations = {}
-
-        if self.localOptions:
-            for key, value in self.localOptions.items():
-                log.debug("setting self.%s = %s" % (key, value))
-                setattr(self, key, value)
-
-        self.timeout *= 1000            ## convert to milliseconds
-
-        if not self.interface:
-            try:
-                iface = txscapy.getDefaultIface()
-            except Exception, e:
-                log.msg("No network interface specified!")
-                log.err(e)
-            else:
-                log.msg("Using system default interface: %s" % iface)
-                self.interface = iface
-
-        if self.pcap:
-            try:
-                self.pcapfile = open(self.pcap, 'a+')
-            except:
-                log.msg("Unable to write to pcap file %s" % self.pcap)
-            else:
-                self.pcap = net.capturePacket(self.pcapfile)
-
-        if not self.dst:
-            if self.file:
-                self.dstProcessor(self.file)
-                for key, value in self.destinations.items():
-                    for label, data in value.items():
-                        if not 'ans' in data:
-                            self.dst = label
-        else:
-            self.addDest(self.dst)
-        log.debug("self.dst is now: %s" % self.dst)
-
-        log.debug("Initialization of %s test completed." % self.name)
-
-    def addDest(self, dest):
-        d = dest.strip()
-        self.destinations[d] = {'dst_ip': d}
-
-    def dstProcessor(self, inputfile):
-        from ipaddr import IPAddress
-
-        if os.path.isfile(inputfile):
-            with open(inputfile) as f:
-                for line in f.readlines():
-                    if line.startswith('#'):
-                        continue
-                    self.addDest(line)
-
-    def test_icmp(self):
-        def process_response(echo_reply, dest):
-           ans, unans = echo_reply
-           if ans:
-               log.msg("Recieved echo reply from %s: %s" % (dest, ans))
-           else:
-               log.msg("No reply was received from %s. Possible censorship event." % dest)
-               log.debug("Unanswered packets: %s" % unans)
-           self.report[dest] = echo_reply
-
-        for label, data in self.destinations.items():
-            reply = sr1(IP(dst=label)/ICMP())
-            process_response(reply, label)
-
-        #(ans, unans) = ping
-        #self.destinations[self.dst].update({'ans': ans,
-        #                                    'unans': unans,
-        #                                    'response_packet': ping})
-        #return ping
-
-        #return reply
diff --git a/data/nettests/experimental/chinatrigger.py b/data/nettests/experimental/chinatrigger.py
deleted file mode 100644
index de1f64d..0000000
--- a/data/nettests/experimental/chinatrigger.py
+++ /dev/null
@@ -1,108 +0,0 @@
-import random
-import string
-import struct
-import time
-
-from twisted.python import usage
-from ooni.templates.scapyt import BaseScapyTest
-
-class UsageOptions(usage.Options):
-    optParameters = [['dst', 'd', None, 'Specify the target address'],
-                     ['port', 'p', None, 'Specify the target port']
-                    ]
-
-class ChinaTriggerTest(BaseScapyTest):
-    """
-    This test is an OONI-based implementation of the C tool written
-    by Philipp Winter to engage Chinese probes in active scanning.
-
-    Example of running it:
-    ./bin/ooniprobe chinatrigger -d 127.0.0.1 -p 8080
-    """
-
-    name = "chinatrigger"
-    usageOptions = UsageOptions
-    requiredOptions = ['dst', 'port']
-    timeout = 2
-
-    def setUp(self):
-        self.dst = self.localOptions['dst']
-        self.port = int(self.localOptions['port'])
-
-    @staticmethod
-    def set_random_servername(pkt):
-        ret = pkt[:121]
-        for i in range(16):
-            ret += random.choice(string.ascii_lowercase)
-        ret += pkt[121+16:]
-        return ret
-
-    @staticmethod
-    def set_random_time(pkt):
-        ret = pkt[:11]
-        ret += struct.pack('!I', int(time.time()))
-        ret += pkt[11+4:]
-        return ret
-
-    @staticmethod
-    def set_random_field(pkt):
-        ret = pkt[:15]
-        for i in range(28):
-            ret += chr(random.randint(0, 255))
-        ret += pkt[15+28:]
-        return ret
-
-    @staticmethod
-    def mutate(pkt, idx):
-        """
-        Slightly changed mutate function.
-        """
-        ret = pkt[:idx]
-        mutation = chr(random.randint(0, 255))
-        while mutation == pkt[idx]:
-            mutation = chr(random.randint(0, 255))
-        ret += mutation
-        ret += pkt[idx+1:]
-        return ret
-
-    @staticmethod
-    def set_all_random_fields(pkt):
-        pkt = ChinaTriggerTest.set_random_servername(pkt)
-        pkt = ChinaTriggerTest.set_random_time(pkt)
-        pkt = ChinaTriggerTest.set_random_field(pkt)
-        return pkt
-
-    def test_send_mutations(self):
-        from scapy.all import IP, TCP
-        pkt = "\x16\x03\x01\x00\xcc\x01\x00\x00\xc8"\
-              "\x03\x01\x4f\x12\xe5\x63\x3f\xef\x7d"\
-              "\x20\xb9\x94\xaa\x04\xb0\xc1\xd4\x8c"\
-              "\x50\xcd\xe2\xf9\x2f\xa9\xfb\x78\xca"\
-              "\x02\xa8\x73\xe7\x0e\xa8\xf9\x00\x00"\
-              "\x3a\xc0\x0a\xc0\x14\x00\x39\x00\x38"\
-              "\xc0\x0f\xc0\x05\x00\x35\xc0\x07\xc0"\
-              "\x09\xc0\x11\xc0\x13\x00\x33\x00\x32"\
-              "\xc0\x0c\xc0\x0e\xc0\x02\xc0\x04\x00"\
-              "\x04\x00\x05\x00\x2f\xc0\x08\xc0\x12"\
-              "\x00\x16\x00\x13\xc0\x0d\xc0\x03\xfe"\
-              "\xff\x00\x0a\x00\xff\x01\x00\x00\x65"\
-              "\x00\x00\x00\x1d\x00\x1b\x00\x00\x18"\
-              "\x77\x77\x77\x2e\x67\x6e\x6c\x69\x67"\
-              "\x78\x7a\x70\x79\x76\x6f\x35\x66\x76"\
-              "\x6b\x64\x2e\x63\x6f\x6d\x00\x0b\x00"\
-              "\x04\x03\x00\x01\x02\x00\x0a\x00\x34"\
-              "\x00\x32\x00\x01\x00\x02\x00\x03\x00"\
-              "\x04\x00\x05\x00\x06\x00\x07\x00\x08"\
-              "\x00\x09\x00\x0a\x00\x0b\x00\x0c\x00"\
-              "\x0d\x00\x0e\x00\x0f\x00\x10\x00\x11"\
-              "\x00\x12\x00\x13\x00\x14\x00\x15\x00"\
-              "\x16\x00\x17\x00\x18\x00\x19\x00\x23"\
-              "\x00\x00"
-
-        pkt = ChinaTriggerTest.set_all_random_fields(pkt)
-        pkts = [IP(dst=self.dst)/TCP(dport=self.port)/pkt]
-        for x in range(len(pkt)):
-            mutation = IP(dst=self.dst)/TCP(dport=self.port)/ChinaTriggerTest.mutate(pkt, x)
-            pkts.append(mutation)
-        return self.sr(pkts, timeout=2)
-
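
test_send_mutations sends the original ClientHello plus one variant per byte
position, each with that single byte replaced by a different random value. A
standalone sketch of the mutation loop (no scapy, hypothetical names):

    import random

    def mutate(pkt, idx):
        # Replace the byte at position idx with a different random byte.
        mutation = chr(random.randint(0, 255))
        while mutation == pkt[idx]:
            mutation = chr(random.randint(0, 255))
        return pkt[:idx] + mutation + pkt[idx + 1:]

    pkt = "\x16\x03\x01\x00\xcc"                 # first few bytes of the ClientHello
    payloads = [pkt] + [mutate(pkt, i) for i in range(len(pkt))]
    assert all(len(p) == len(pkt) for p in payloads)
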
diff --git a/data/nettests/experimental/dns_injection.py b/data/nettests/experimental/dns_injection.py
deleted file mode 100644
index 97233cf..0000000
--- a/data/nettests/experimental/dns_injection.py
+++ /dev/null
@@ -1,63 +0,0 @@
-# -*- encoding: utf-8 -*-
-from twisted.python import usage
-from twisted.internet import defer
-
-from ooni.templates import dnst
-from ooni import nettest
-from ooni.utils import log
-
-class UsageOptions(usage.Options):
-    optParameters = [
-            ['resolver', 'r', '8.8.8.1', 'an invalid DNS resolver'],
-            ['timeout', 't', 3, 'timeout after which we should consider the query failed']
-    ]
-
-class DNSInjectionTest(dnst.DNSTest):
-    """
-    This test detects spoofed (injected) DNS responses by performing UDP-based
-    DNS queries towards an invalid DNS resolver.
-
-    For it to work we must be traversing the network segment of a machine that
-    is actively injecting DNS query answers.
-    """
-    name = "DNS Injection"
-    description = "Checks for injection of spoofed DNS answers"
-    version = "0.1"
-    authors = "Arturo Filastò"
-
-    inputFile = ['file', 'f', None,
-                 'Input file of list of hostnames to attempt to resolve']
-
-    usageOptions = UsageOptions
-    requiredOptions = ['resolver', 'file']
-
-    def setUp(self):
-        self.resolver = (self.localOptions['resolver'], 53)
-        self.queryTimeout = [self.localOptions['timeout']]
-
-    def inputProcessor(self, filename):
-        fp = open(filename)
-        for line in fp:
-            if line.startswith('http://'):
-                yield line.replace('http://', '').replace('/', '').strip()
-            else:
-                yield line.strip()
-        fp.close()
-
-    def test_injection(self):
-        self.report['injected'] = None
-
-        d = self.performALookup(self.input, self.resolver)
-        @d.addCallback
-        def cb(res):
-            log.msg("The DNS query for %s is injected" % self.input)
-            self.report['injected'] = True
-
-        @d.addErrback
-        def err(err):
-            err.trap(defer.TimeoutError)
-            log.msg("The DNS query for %s is not injected" % self.input)
-            self.report['injected'] = False
-
-        return d
-
diff --git a/data/nettests/experimental/domclass_collector.py b/data/nettests/experimental/domclass_collector.py
deleted file mode 100644
index c1866f2..0000000
--- a/data/nettests/experimental/domclass_collector.py
+++ /dev/null
@@ -1,33 +0,0 @@
-# -*- encoding: utf-8 -*-
-#
-# The purpose of this collector is to compute the DOM eigenvalues for the
-# sites listed in the input file.
-#
-#
-# :authors: Arturo Filastò
-# :licence: see LICENSE
-
-from twisted.internet import threads, defer
-
-from ooni.kit import domclass
-from ooni.templates import httpt
-
-class DOMClassCollector(httpt.HTTPTest):
-    name = "DOM class collector"
-    author = "Arturo Filastò"
-    version = 0.1
-
-    followRedirects = True
-
-    inputFile = ['file', 'f', None, 'The list of urls to build a domclass for']
-
-    def test_collect(self):
-        if self.input:
-            url = self.input
-            return self.doRequest(url)
-        else:
-            raise Exception("No input specified")
-
-    def processResponseBody(self, body):
-        eigenvalues = domclass.compute_eigenvalues_from_DOM(content=body)
-        self.report['eigenvalues'] = eigenvalues.tolist()
diff --git a/data/nettests/experimental/http_filtering_bypassing.py b/data/nettests/experimental/http_filtering_bypassing.py
deleted file mode 100644
index dc103db..0000000
--- a/data/nettests/experimental/http_filtering_bypassing.py
+++ /dev/null
@@ -1,84 +0,0 @@
-# -*- encoding: utf-8 -*-
-from twisted.python import usage
-
-from ooni.utils import log
-from ooni.utils import randomStr, randomSTR
-from ooni.templates import tcpt
-
-class UsageOptions(usage.Options):
-    optParameters = [['backend', 'b', '127.0.0.1',
-                        'The OONI backend that runs a TCP echo server'],
-                    ['backendport', 'p', 80, 'Specify the port that the TCP echo server is running on (should only be set for debugging)']]
-
-class HTTPFilteringBypass(tcpt.TCPTest):
-    name = "HTTPFilteringBypass"
-    version = "0.1"
-    authors = "xx"
-
-    inputFile = ['file', 'f', None,
-            'Specify a list of hostnames to use as inputs']
-
-    usageOptions = UsageOptions
-    requiredOptions = ['backend']
-
-    def setUp(self):
-        self.port = int(self.localOptions['backendport'])
-        self.address = self.localOptions['backend']
-
-    def check_for_manipulation(self, response, payload):
-        log.debug("Checking if %s == %s" % (response, payload))
-        if response != payload:
-            self.report['tampering'] = True
-        else:
-            self.report['tampering'] = False
-
-    def test_prepend_newline(self):
-        payload = "\nGET / HTTP/1.1\n\r"
-        payload += "Host: %s\n\r" % self.input
-
-        d = self.sendPayload(payload)
-        d.addCallback(self.check_for_manipulation, payload)
-        return d
-
-    def test_tab_trick(self):
-        payload = "GET / HTTP/1.1\n\r"
-        payload += "Host: %s\t\n\r" % self.input
-
-        d = self.sendPayload(payload)
-        d.addCallback(self.check_for_manipulation, payload)
-        return d
-
-    def test_subdomain_blocking(self):
-        payload = "GET / HTTP/1.1\n\r"
-        payload += "Host: %s\n\r" % randomStr(10) + '.' + self.input
-
-        d = self.sendPayload(payload)
-        d.addCallback(self.check_for_manipulation, payload)
-        return d
-
-    def test_fuzzy_domain_blocking(self):
-        hostname_field = randomStr(10) + '.' + self.input + '.' + randomStr(10)
-        payload = "GET / HTTP/1.1\n\r"
-        payload += "Host: %s\n\r" % hostname_field
-
-        d = self.sendPayload(payload)
-        d.addCallback(self.check_for_manipulation, payload)
-        return d
-
-    def test_fuzzy_match_blocking(self):
-        hostname_field = randomStr(10) + self.input + randomStr(10)
-        payload = "GET / HTTP/1.1\n\r"
-        payload += "Host: %s\n\r" % hostname_field
-
-        d = self.sendPayload(payload)
-        d.addCallback(self.check_for_manipulation, payload)
-        return d
-
-    def test_normal_request(self):
-        payload = "GET / HTTP/1.1\n\r"
-        payload += "Host: %s\n\r" % self.input
-
-        d = self.sendPayload(payload)
-        d.addCallback(self.check_for_manipulation, payload)
-        return d
-
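
Each test above sends a slightly mangled HTTP request to a TCP echo backend
and flags tampering whenever the echoed bytes differ from what was sent. A
minimal sketch of that pattern using plain sockets instead of the tcpt
template (hypothetical names):

    import socket

    def check_for_manipulation(backend, port, payload):
        # True if the bytes echoed back by the server differ from the payload sent.
        s = socket.create_connection((backend, port), timeout=5)
        try:
            s.sendall(payload)
            received = b""
            while len(received) < len(payload):
                chunk = s.recv(4096)
                if not chunk:
                    break
                received += chunk
        finally:
            s.close()
        return received != payload

    payload = b"GET / HTTP/1.1\n\rHost: example.com\t\n\r"
    # check_for_manipulation('127.0.0.1', 80, payload)
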
diff --git a/data/nettests/experimental/http_keyword_filtering.py b/data/nettests/experimental/http_keyword_filtering.py
deleted file mode 100644
index 0ae9c52..0000000
--- a/data/nettests/experimental/http_keyword_filtering.py
+++ /dev/null
@@ -1,45 +0,0 @@
-# -*- encoding: utf-8 -*-
-#
-# :authors: Arturo Filastò
-# :licence: see LICENSE
-
-from twisted.python import usage
-
-from ooni.templates import httpt
-
-class UsageOptions(usage.Options):
-    optParameters = [['backend', 'b', 'http://127.0.0.1:57001',
-                        'URL of the test backend to use']]
-
-class HTTPKeywordFiltering(httpt.HTTPTest):
-    """
-    This test involves performing HTTP requests whose bodies contain the
-    keywords to be tested for censorship.
-
-    It does not detect censorship on the client, but just logs the response from the 
-    HTTP backend server.
-    """
-    name = "HTTP Keyword Filtering"
-    author = "Arturo Filastò"
-    version = "0.1.1"
-
-    inputFile = ['file', 'f', None, 'List of keywords to use for censorship testing']
-
-    usageOptions = UsageOptions
-
-    requiredOptions = ['backend']
-
-    def test_get(self):
-        """
-        Perform a HTTP GET request to the backend containing the keyword to be
-        tested inside of the request body.
-        """
-        return self.doRequest(self.localOptions['backend'], method="GET", body=self.input)
-
-    def test_post(self):
-        """
-        Perform a HTTP POST request to the backend containing the keyword to be
-        tested inside of the request body.
-        """
-        return self.doRequest(self.localOptions['backend'], method="POST", body=self.input)
-
diff --git a/data/nettests/experimental/http_trix.py b/data/nettests/experimental/http_trix.py
deleted file mode 100644
index 85a4ba2..0000000
--- a/data/nettests/experimental/http_trix.py
+++ /dev/null
@@ -1,47 +0,0 @@
-# -*- encoding: utf-8 -*-
-from twisted.python import usage
-
-from ooni.utils import log
-from ooni.utils import randomStr, randomSTR
-from ooni.templates import tcpt
-
-class UsageOptions(usage.Options):
-    optParameters = [['backend', 'b', '127.0.0.1',
-                        'The OONI backend that runs a TCP echo server'],
-                    ['backendport', 'p', 80, 'Specify the port that the TCP echo server is running on (should only be set for debugging)']]
-
-class HTTPTrix(tcpt.TCPTest):
-    name = "HTTPTrix"
-    version = "0.1"
-    authors = "Arturo Filastò"
-
-    usageOptions = UsageOptions
-    requiredOptions = ['backend']
-
-    def setUp(self):
-        self.port = int(self.localOptions['backendport'])
-        self.address = self.localOptions['backend']
-
-    def check_for_manipulation(self, response, payload):
-        log.debug("Checking if %s == %s" % (response, payload))
-        if response != payload:
-            self.report['tampering'] = True
-        else:
-            self.report['tampering'] = False
-
-    def test_for_squid_cache_object(self):
-        """
-        This detects the presence of a squid transparent HTTP proxy by sending
-        a request for cache_object://localhost/info.
-
-        This tests for the presence of a Squid Transparent proxy by sending:
-
-            GET cache_object://localhost/info HTTP/1.1
-        """
-        payload = 'GET cache_object://localhost/info HTTP/1.1'
-        payload += '\n\r'
-
-        d = self.sendPayload(payload)
-        d.addCallback(self.check_for_manipulation, payload)
-        return d
-
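For reference, the cache_object probe above can be reproduced with a plain socket against a TCP echo backend (address and port are assumptions; the test defaults to port 80):

    import socket

    # Same payload as the test above, including its '\n\r' ordering.
    PAYLOAD = 'GET cache_object://localhost/info HTTP/1.1\n\r'

    def squid_cache_object_probe(backend_addr, backend_port=80, timeout=10):
        # An honest echo backend returns the payload unchanged; an intercepting
        # Squid proxy answers the cache_object request itself, so any mismatch
        # is reported as tampering.
        s = socket.create_connection((backend_addr, backend_port), timeout)
        try:
            s.sendall(PAYLOAD)
            response = s.recv(4096)
        finally:
            s.close()
        return response != PAYLOAD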
diff --git a/data/nettests/experimental/http_uk_mobile_networks.py b/data/nettests/experimental/http_uk_mobile_networks.py
deleted file mode 100644
index 784a9e9..0000000
--- a/data/nettests/experimental/http_uk_mobile_networks.py
+++ /dev/null
@@ -1,85 +0,0 @@
-# -*- encoding: utf-8 -*-
-import yaml
-
-from twisted.python import usage
-from twisted.plugin import IPlugin
-
-from ooni.templates import httpt
-from ooni.utils import log
-
-class UsageOptions(usage.Options):
-    """
-    See https://github.com/hellais/ooni-inputs/processed/uk_mobile_networks_redirects.yaml 
-    to see what the rules file should look like.
-    """
-    optParameters = [
-                     ['rules', 'y', None, 
-                    'Specify the redirect rules file ']
-                    ]
-
-class HTTPUKMobileNetworksTest(httpt.HTTPTest):
-    """
-    This test was thought of by Open Rights Group and implemented with the
-    purpose of detecting censorship in the UK.
-    For more details on this test see:
-    https://trac.torproject.org/projects/tor/ticket/6437
-    XXX port the knowledge from the trac ticket into this test docstring
-    """
-    name = "HTTP UK mobile network redirect test"
-
-    usageOptions = UsageOptions
-
-    followRedirects = True
-
-    inputFile = ['urls', 'f', None, 'List of urls one per line to test for censorship']
-    requiredOptions = ['urls']
-
-    def testPattern(self, value, pattern, type):
-        if type == 'eq':
-            return value == pattern
-        elif type == 're':
-            import re
-            if re.match(pattern, value):
-                return True
-            else:
-                return False
-        else:
-            return None
-
-    def testPatterns(self, patterns, location):
-        test_result = False
-
-        if type(patterns) == list:
-            for pattern in patterns:
-                test_result |= self.testPattern(location, pattern['value'], pattern['type'])
-
-        return test_result
-
-    def testRules(self, rules, location):
-        result = {}
-        blocked = False
-        for rule, value in rules.items():
-            current_rule = {}
-            current_rule['name'] = value['name']
-            current_rule['patterns'] = value['patterns']
-            current_rule['test'] = self.testPatterns(value['patterns'], location)
-            blocked |= current_rule['test']
-            result[rule] = current_rule
-        result['blocked'] = blocked
-        return result
-
-    def processRedirect(self, location):
-        self.report['redirect'] = None
-        rules_file = self.localOptions['rules']
-
-        fp = open(rules_file)
-        rules = yaml.safe_load(fp)
-        fp.close()
-
-        log.msg("Testing rules %s" % rules)
-        redirect = self.testRules(rules, location)
-        self.report['redirect'] = redirect
-
-
-
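The linked rules file is no longer reachable, but testRules()/testPatterns() above imply its shape: a mapping of rule ids to a name plus a list of patterns, each with a type ('eq' or 're') and a value matched against the redirect Location. A hypothetical example of what yaml.safe_load() on such a file would produce (rule id, name and values are invented):

    # What testRules() expects after yaml.safe_load() has parsed the rules file.
    EXAMPLE_RULES = {
        'example_operator_block': {
            'name': 'Example operator adult-content block page',
            'patterns': [
                {'type': 'eq', 'value': 'http://blocked.example.net/'},
                {'type': 're', 'value': 'blocked'},
            ],
        },
    }

    # testRules(EXAMPLE_RULES, location) would mark the redirect as blocked
    # whenever the Location header matches any of the patterns above.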
diff --git a/data/nettests/experimental/keyword_filtering.py b/data/nettests/experimental/keyword_filtering.py
deleted file mode 100644
index 9eec4ff..0000000
--- a/data/nettests/experimental/keyword_filtering.py
+++ /dev/null
@@ -1,52 +0,0 @@
-# -*- encoding: utf-8 -*-
-#
-# :authors: Arturo Filastò
-# :licence: see LICENSE
-
-from twisted.python import usage
-from twisted.internet import defer
-
-from ooni.utils import log
-from ooni.templates import scapyt
-
-from scapy.all import *
-
-class UsageOptions(usage.Options):
-    optParameters = [
-                    ['backend', 'b', '127.0.0.1:57002', 'Test backend running TCP echo'],
-                    ['timeout', 't', 5, 'Timeout after which to give up waiting for RST packets']
-                    ]
-
-class KeywordFiltering(scapyt.BaseScapyTest):
-    name = "Keyword Filtering detection based on RST packets"
-    author = "Arturo Filastò"
-    version = "0.1"
-
-    usageOptions = UsageOptions
-
-    inputFile = ['file', 'f', None, 
-            'List of keywords to use for censorship testing']
-
-    def test_tcp_keyword_filtering(self):
-        """
-        Places the keyword to be tested in the payload of a TCP packet.
-        XXX need to implement bisection method for enumerating keywords.
-            though this should not be an issue since we are testing all 
-            the keywords in parallel.
-        """
-        def finished(packets):
-            log.debug("Finished running TCP traceroute test on port %s" % port)
-            answered, unanswered = packets
-            self.report['rst_packets'] = []
-            for snd, rcv in answered:
-                # The received packet has the RST flag
-                if rcv[TCP].flags == 4:
-                    self.report['rst_packets'].append(rcv)
-
-        backend_ip, backend_port = self.localOptions['backend'].split(':')
-        timeout = int(self.localOptions['timeout'])
-        keyword_to_test = str(self.input)
-        packets = IP(dst=backend_ip, id=RandShort())/TCP(dport=int(backend_port))/keyword_to_test
-        d = self.sr(packets, timeout=timeout)
-        d.addCallback(finished)
-        return d
-
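A self-contained sketch of the same RST-based check (requires root and scapy; the backend address mirrors the option default and is otherwise an assumption):

    from scapy.all import IP, TCP, Raw, RandShort, sr

    def probe_keyword(keyword, backend_ip='127.0.0.1', backend_port=57002, timeout=5):
        # Carry the keyword in the payload of a single TCP segment and collect
        # the answers; a reply with the RST flag (0x04) set suggests that a
        # middlebox objected to the keyword.
        packet = IP(dst=backend_ip, id=RandShort()) / \
                 TCP(dport=backend_port) / Raw(load=keyword)
        answered, unanswered = sr(packet, timeout=timeout)
        return [rcv for snd, rcv in answered
                if TCP in rcv and rcv[TCP].flags & 0x04]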
diff --git a/data/nettests/experimental/parasitictraceroute.py b/data/nettests/experimental/parasitictraceroute.py
deleted file mode 100644
index 631c24b..0000000
--- a/data/nettests/experimental/parasitictraceroute.py
+++ /dev/null
@@ -1,129 +0,0 @@
-# -*- encoding: utf-8 -*-
-#
-# :authors: Arturo Filastò
-# :licence: see LICENSE
-
-from twisted.python import usage
-from twisted.internet import defer
-
-from ooni.templates import scapyt
-
-from scapy.all import *
-
-from ooni.utils import log
-
-class UsageOptions(usage.Options):
-    optParameters = [['backend', 'b', 'google.com', 'Test backend to use'],
-                    ['timeout', 't', 5, 'The timeout for the traceroute test'],
-                    ['maxttl', 'm', 64, 'The maximum value of ttl to set on packets'],
-                    ['dstport', 'd', 80, 'Set the destination port of the traceroute test'],
-                    ['srcport', 'p', None, 'Set the source port to a specific value']]
-
-class ParasiticalTracerouteTest(scapyt.BaseScapyTest):
-    name = "Parasitic TCP Traceroute Test"
-    author = "Arturo Filastò"
-    version = "0.1"
-
-    usageOptions = UsageOptions
-
-    def setUp(self):
-        def get_sport():
-            if self.localOptions['srcport']:
-                return int(self.localOptions['srcport'])
-            else:
-                return random.randint(1024, 65535)
-        self.get_sport = get_sport
-
-        self.dst_ip = socket.gethostbyaddr(self.localOptions['backend'])[2][0]
-
-        self.dport = int(self.localOptions['dstport'])
-        self.max_ttl = int(self.localOptions['maxttl'])
-
-    @defer.inlineCallbacks
-    def test_parasitic_tcp_traceroute(self):
-        """
-        Establishes a TCP stream, then sequentially sends TCP packets with
-        increasing TTL until we reach the ttl of the destination.
-
-        Requires the backend to respond with an ACK to our SYN packet (i.e.
-        the port must be open)
-
-        XXX this currently does not work properly. The problem lies in the fact
-        that we are currently using the scapy layer 3 socket. This socket makes
-        packets received be trapped by the kernel TCP stack, therefore when we
-        send out a SYN and get back a SYN-ACK the kernel stack will reply with
-        a RST because it did not send a SYN.
-
-        The quick fix to this would be to establish a TCP stream using socket
-        calls and then "cannibalizing" the TCP session with scapy.
-
-        The real fix is to make scapy use libpcap instead of raw sockets
-        obviously as we previously did... arg.
-        """
-        sport = self.get_sport()
-        dport = self.dport
-        ipid = int(RandShort())
-
-        ip_layer = IP(dst=self.dst_ip,
-                id=ipid, ttl=self.max_ttl)
-
-        syn = ip_layer/TCP(sport=sport, dport=dport, flags="S", seq=0)
-
-        log.msg("Sending...")
-        syn.show2()
-
-        synack = yield self.sr1(syn)
-
-        if not synack:
-            log.err("Got no response. Try increasing max_ttl")
-            return
-
-        log.msg("Got response...")
-        synack.show2()
-
-        if synack[TCP].flags == 11:
-            log.msg("Got back a FIN ACK. The destination port is closed")
-            return
-
-        elif synack[TCP].flags == 18:
-            log.msg("Got a SYN ACK. All is well.")
-        else:
-            log.err("Got an unexpected result")
-            return
-
-        ack = ip_layer/TCP(sport=synack.dport,
-                            dport=dport, flags="A",
-                            seq=synack.ack, ack=synack.seq + 1)
-
-        yield self.send(ack)
-
-        self.report['hops'] = []
-        # For the time being we make the assumption that we are NATted and
-        # that the NAT will forward the packet to the destination even if the TTL has 
-        for ttl in range(1, self.max_ttl):
-            log.msg("Sending packet with ttl of %s" % ttl)
-            ip_layer.ttl = ttl
-            empty_tcp_packet = ip_layer/TCP(sport=synack.dport,
-                    dport=dport, flags="A",
-                    seq=synack.ack, ack=synack.seq + 1)
-
-            answer = yield self.sr1(empty_tcp_packet)
-            if not answer:
-                log.err("Got no response for ttl %s" % ttl)
-                continue
-
-            try:
-                icmp = answer[ICMP]
-                report = {'ttl': empty_tcp_packet.ttl,
-                    'address': answer.src,
-                    'rtt': answer.time - empty_tcp_packet.time
-                }
-                log.msg("%s: %s" % (dport, report))
-                self.report['hops'].append(report)
-
-            except IndexError:
-                if answer.src == self.dst_ip:
-                    answer.show()
-                    log.msg("Reached the destination. We have finished the traceroute")
-                    return
-
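The parasitic variant above is acknowledged as broken; the underlying TTL-increment idea is easier to see in an ordinary (non-parasitic) TCP traceroute sketch (requires root and scapy):

    from scapy.all import IP, TCP, ICMP, sr1

    def tcp_traceroute(dst, dport=80, max_ttl=30, timeout=2):
        # Send SYNs with increasing TTL: intermediate hops answer with ICMP
        # time-exceeded, while the destination answers with a SYN/ACK or RST.
        hops = []
        for ttl in range(1, max_ttl + 1):
            reply = sr1(IP(dst=dst, ttl=ttl) / TCP(dport=dport, flags='S'),
                        timeout=timeout, verbose=0)
            hops.append((ttl, reply.src if reply is not None else None))
            if reply is not None and ICMP not in reply:
                break  # reached the destination (TCP reply instead of ICMP)
        return hops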
diff --git a/data/nettests/experimental/script.py b/data/nettests/experimental/script.py
deleted file mode 100644
index 4772f65..0000000
--- a/data/nettests/experimental/script.py
+++ /dev/null
@@ -1,90 +0,0 @@
-from ooni import nettest
-from ooni.utils import log
-from twisted.internet import defer, protocol, reactor
-from twisted.python import usage
-
-import os
-
-
-def which(program):
-    def is_exe(fpath):
-        return os.path.isfile(fpath) and os.access(fpath, os.X_OK)
-
-    fpath, fname = os.path.split(program)
-    if fpath:
-        if is_exe(program):
-            return program
-    else:
-        for path in os.environ["PATH"].split(os.pathsep):
-            path = path.strip('"')
-            exe_file = os.path.join(path, program)
-            if is_exe(exe_file):
-                return exe_file
-    return None
-
-
-class UsageOptions(usage.Options):
-    optParameters = [
-        ['interpreter', 'i', '', 'The interpreter to use'],
-        ['script', 's', '', 'The script to run']
-    ]
-
-
-class ScriptProcessProtocol(protocol.ProcessProtocol):
-    def __init__(self, test_case):
-        self.test_case = test_case
-        self.deferred = defer.Deferred()
-
-    def connectionMade(self):
-        log.debug("connectionMade")
-        self.transport.closeStdin()
-        self.test_case.report['lua_output'] = ""
-
-    def outReceived(self, data):
-        log.debug('outReceived: %s' % data)
-        self.test_case.report['lua_output'] += data
-
-    def errReceived(self, data):
-        log.err('Script error: %s' % data)
-        self.transport.signalProcess('KILL')
-
-    def processEnded(self, status):
-        rc = status.value.exitCode
-        log.debug('processEnded: %s, %s' % \
-                  (rc, self.test_case.report['lua_output']))
-        if rc == 0:
-            self.deferred.callback(self)
-        else:
-            self.deferred.errback(rc)
-
-
-# TODO: Maybe the script requires a back-end.
-class Script(nettest.NetTestCase):
-    name = "Script test"
-    version = "0.1"
-    authors = "Dominic Hamon"
-
-    usageOptions = UsageOptions
-    requiredOptions = ['interpreter', 'script']
-
-    def test_run_script(self):
-        """
-        We run the script specified in the usage options and take whatever
-        is printed to stdout as the results of the test.
-        """
-        processProtocol = ScriptProcessProtocol(self)
-
-        interpreter = self.localOptions['interpreter']
-        if not which(interpreter):
-            log.err('Unable to find %s executable in PATH.' % interpreter)
-            return
-
-        reactor.spawnProcess(processProtocol,
-                             interpreter,
-                             args=[interpreter, self.localOptions['script']],
-                             env={'HOME': os.environ['HOME']},
-                             usePTY=True)
-
-        if not reactor.running:
-            reactor.run()
-        return processProtocol.deferred
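The protocol above accumulates stdout by hand; when the KILL-on-stderr behaviour is not needed, Twisted's own helper does the same job in a few lines (a sketch, not the test's actual implementation):

    import os
    from twisted.internet.utils import getProcessOutput

    def run_script(interpreter, script):
        # Returns a Deferred that fires with everything the script printed to
        # stdout; stderr output or a failing run fires the errback instead.
        return getProcessOutput(interpreter, args=[script],
                                env={'HOME': os.environ['HOME']})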
diff --git a/data/nettests/experimental/squid.py b/data/nettests/experimental/squid.py
deleted file mode 100644
index 777bc3e..0000000
--- a/data/nettests/experimental/squid.py
+++ /dev/null
@@ -1,117 +0,0 @@
-# -*- encoding: utf-8 -*-
-#
-# Squid transparent HTTP proxy detector
-# *************************************
-#
-# :authors: Arturo Filastò
-# :licence: see LICENSE
-
-import re
-
-from ooni import utils
-from ooni.utils import log
-from ooni.templates import httpt
-
-class SquidTest(httpt.HTTPTest):
-    """
-    This test aims at detecting the presence of a squid based transparent HTTP
-    proxy. It also tries to detect the version number.
-    """
-    name = "Squid test"
-    author = "Arturo Filastò"
-    version = "0.1"
-
-    optParameters = [['backend', 'b', 'http://ooni.nu/test/', 'Test backend to use']]
-
-    #inputFile = ['urls', 'f', None, 'Urls file']
-    inputs = ['http://google.com']
-
-    def test_cacheobject(self):
-        """
-        This detects the presence of a squid transparent HTTP proxy by sending
-        a request for cache_object://localhost/info.
-
-        The response to this request will usually also contain the squid
-        version number.
-        """
-        log.debug("Running")
-        def process_body(body):
-            if "Access Denied." in body:
-                self.report['transparent_http_proxy'] = True
-            else:
-                self.report['transparent_http_proxy'] = False
-
-        log.msg("Testing Squid proxy presence by sending a request for "\
-                "cache_object")
-        headers = {}
-        #headers["Host"] = [self.input]
-        self.report['transparent_http_proxy'] = None
-        method = "GET"
-        body = "cache_object://localhost/info"
-        return self.doRequest(self.localOptions['backend'], method=method, body=body,
-                        headers=headers, body_processor=process_body)
-
-    def test_search_bad_request(self):
-        """
-        Attempts to perform a request with a random invalid HTTP method.
-
-        If we are being MITMed by a Transparent Squid HTTP proxy we will get
-        back a response containing the X-Squid-Error header.
-        """
-        def process_headers(headers):
-            log.debug("Processing headers in test_search_bad_request")
-            if 'X-Squid-Error' in headers:
-                log.msg("Detected the presence of a transparent HTTP "\
-                        "squid proxy")
-                self.report['transparent_http_proxy'] = True
-            else:
-                log.msg("Did not detect the presence of transparent HTTP "\
-                        "squid proxy")
-                self.report['transparent_http_proxy'] = False
-
-        log.msg("Testing Squid proxy presence by sending a random bad request")
-        headers = {}
-        #headers["Host"] = [self.input]
-        method = utils.randomSTR(10, True)
-        self.report['transparent_http_proxy'] = None
-        return self.doRequest(self.localOptions['backend'], method=method,
-                        headers=headers, headers_processor=process_headers)
-
-    def test_squid_headers(self):
-        """
-        Detects the presence of a squid transparent HTTP proxy based on the
-        response headers it adds to the responses to requests.
-        """
-        def process_headers(headers):
-            """
-            Checks if any of the headers that squid is known to add match the
-            squid regexp.
-
-            We are looking for something that looks like this:
-
-                via: 1.0 cache_server:3128 (squid/2.6.STABLE21)
-                x-cache: MISS from cache_server
-                x-cache-lookup: MISS from cache_server:3128
-            """
-            squid_headers = {'via': r'.* \((squid.*)\)',
-                        'x-cache': r'MISS from (\w+)',
-                        'x-cache-lookup': r'MISS from (\w+:?\d+?)'
-                        }
-
-            self.report['transparent_http_proxy'] = False
-            for key in squid_headers.keys():
-                if key in headers:
-                    log.debug("Found %s in headers" % key)
-                    m = re.search(squid_headers[key], headers[key])
-                    if m:
-                        log.msg("Detected the presence of squid transparent"\
-                                " HTTP Proxy")
-                        self.report['transparent_http_proxy'] = True
-
-        log.msg("Testing Squid proxy by looking at response headers")
-        headers = {}
-        #headers["Host"] = [self.input]
-        method = "GET"
-        self.report['transparent_http_proxy'] = None
-        d = self.doRequest(self.localOptions['backend'], method=method,
-                        headers=headers, headers_processor=process_headers)
-        return d
-
-
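The header patterns used in test_squid_headers() can also be applied directly to any captured response headers; a small standalone version:

    import re

    # Headers that a Squid cache is known to add, mirroring the test above.
    SQUID_HEADERS = {
        'via': r'.* \((squid.*)\)',
        'x-cache': r'MISS from (\w+)',
        'x-cache-lookup': r'MISS from (\w+:?\d+?)',
    }

    def looks_like_squid(headers):
        # headers: dict mapping lower-cased header names to their values.
        for name, pattern in SQUID_HEADERS.items():
            value = headers.get(name)
            if value and re.search(pattern, value):
                return True
        return False

    # looks_like_squid({'via': '1.0 cache_server:3128 (squid/2.6.STABLE21)'}) -> True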
diff --git a/data/nettests/experimental/tls_handshake.py b/data/nettests/experimental/tls_handshake.py
deleted file mode 100644
index 5da2e8b..0000000
--- a/data/nettests/experimental/tls_handshake.py
+++ /dev/null
@@ -1,809 +0,0 @@
-#!/usr/bin/env python
-# -*- encoding: utf-8 -*-
-"""
-  tls_handshake.py
-  ----------------
-
-  This file contains test cases for determining if a TLS handshake completes
-  successfully, including ways to test if a TLS handshake which uses Mozilla
-  Firefox's current ciphersuite list completes. Rather than using Twisted and
-  OpenSSL's methods for automatically completing a handshake, which includes
-  setting all the parameters, such as the ciphersuite list, these tests use
-  non-blocking sockets and implement asynchronous error-handling traversal of
-  OpenSSL's memory BIO state machine, allowing us to determine where and why a
-  handshake fails.
-
-  This network test is a complete rewrite of a pseudonymously contributed
-  script by Hackerberry Finn, in order to fit into OONI's core network tests.
-
-  @authors: Isis Agora Lovecruft <isis@xxxxxxxxxxxxxx>
-  @license: see included LICENSE file
-  @copyright: © 2013 Isis Lovecruft, The Tor Project Inc.
-"""
-
-from socket import error   as socket_error
-from socket import timeout as socket_timeout
-from time   import sleep
-
-import os
-import socket
-import struct
-import sys
-import types
-
-import ipaddr
-import OpenSSL
-
-from OpenSSL                import SSL, crypto
-from twisted.internet       import defer, threads
-from twisted.python         import usage, failure
-
-from ooni       import nettest, config
-from ooni.utils import log
-from ooni.errors import InsufficientPrivileges
-
-## For a way to obtain the current version of Firefox's default ciphersuite
-## list, see https://trac.torproject.org/projects/tor/attachment/ticket/4744/
-## and the attached file "get_mozilla_files.py".
-##
-## Note, however, that doing so requires the source code to the version of
-## firefox that you wish to emulate.
-
-firefox_ciphers = ["ECDHE-ECDSA-AES256-SHA",
-                   "ECDHE-RSA-AES256-SHA",
-                   "DHE-RSA-CAMELLIA256-SHA",
-                   "DHE-DSS-CAMELLIA256-SHA",
-                   "DHE-RSA-AES256-SHA",
-                   "DHE-DSS-AES256-SHA",
-                   "ECDH-ECDSA-AES256-CBC-SHA",
-                   "ECDH-RSA-AES256-CBC-SHA",
-                   "CAMELLIA256-SHA",
-                   "AES256-SHA",
-                   "ECDHE-ECDSA-RC4-SHA",
-                   "ECDHE-ECDSA-AES128-SHA",
-                   "ECDHE-RSA-RC4-SHA",
-                   "ECDHE-RSA-AES128-SHA",
-                   "DHE-RSA-CAMELLIA128-SHA",
-                   "DHE-DSS-CAMELLIA128-SHA",]
-
-
-class SSLContextError(usage.UsageError):
-    """Raised when we're missing the SSL context method, or incompatible
-    contexts were provided. The SSL context method should be one of the
-    following:
-
-        :attr:`OpenSSL.SSL.SSLv2_METHOD <OpenSSL.SSL.SSLv2_METHOD>`
-        :attr:`OpenSSL.SSL.SSLv23_METHOD <OpenSSL.SSL.SSLv23_METHOD>`
-        :attr:`OpenSSL.SSL.SSLv3_METHOD <OpenSSL.SSL.SSLv3_METHOD>`
-        :attr:`OpenSSL.SSL.TLSv1_METHOD <OpenSSL.SSL.TLSv1_METHOD>`
-
-    To use the pre-defined error messages, construct with one of the
-    :meth:`SSLContextError.errors.keys <keys>` as the ``message`` string, like
-    so:
-
-        ``SSLContextError('NO_CONTEXT')``
-    """
-
-    #: Pre-defined error messages.
-    errors = {
-        'NO_CONTEXT': 'No SSL/TLS context chosen! Defaulting to TLSv1.',
-        'INCOMPATIBLE': str("Testing TLSv1 (option '--tls1') is incompatible "
-                            + "with testing SSL ('--ssl2' and '--ssl3')."),
-        'MISSING_SSLV2': str("Your version of OpenSSL was compiled without "
-                             + "support for SSLv2. This is normal on newer "
-                             + "versions of OpenSSL, but it means that you "
-                             + "will be unable to test SSLv2 handshakes "
-                             + "without recompiling OpenSSL."), }
-
-    def __init__(self, message):
-        if message in self.errors.keys():
-            message = self.errors[message]
-        super(usage.UsageError, self).__init__(message)
-
-class HostUnreachable(Exception):
-    """Raised when the host IP address appears to be unreachable."""
-    pass
-
-class ConnectionTimeout(Exception):
-    """Raised when we receive a :class:`socket.timeout <timeout>`, in order to
-    pass the Exception along to
-    :func:`TLSHandshakeTest.test_handshake.connectionFailed
-    <connectionFailed>`.
-    """
-    pass
-
-class HandshakeOptions(usage.Options):
-    """ :class:`usage.Options <Options>` parser for the tls-handshake test."""
-    optParameters = [
-        ['host', 'h', None,
-         'Remote host IP address (v4/v6) and port, i.e. "1.2.3.4:443"'],
-        ['port', 'p', None,
-         'Use this port for all hosts, regardless of port specified in file'],
-        ['ciphersuite', 'c', None ,
-         'File containing ciphersuite list, one per line'],]
-    optFlags = [
-        ['ssl2', '2', 'Use SSLv2'],
-        ['ssl3', '3', 'Use SSLv3'],
-        ['tls1', 't', 'Use TLSv1'],]
-
-class HandshakeTest(nettest.NetTestCase):
-    """An ooniprobe NetTestCase for determining if we can complete a TLS/SSL
-    handshake with a remote host.
-    """
-    name         = 'tls-handshake'
-    author       = 'Isis Lovecruft <isis@xxxxxxxxxxxxxx>'
-    description  = 'A test to determine if we can complete a TLS handshake.'
-    version      = '0.0.3'
-
-    requiresRoot = False
-    usageOptions = HandshakeOptions
-
-    host = None
-    inputFile = ['file', 'f', None, 'List of <IP>:<PORT>s to test']
-
-    #: Default SSL/TLS context method.
-    context = SSL.Context(SSL.TLSv1_METHOD)
-
-    def setUp(self, *args, **kwargs):
-        """Set defaults for a :class:`HandshakeTest <HandshakeTest>`."""
-
-        self.ciphers = list()
-
-        if self.localOptions:
-            options = self.localOptions
-
-            ## check that we're testing an IP:PORT, else exit gracefully:
-            if not (options['host']  or options['file']):
-                raise SystemExit("Need --host or --file!")
-            if options['host']:
-                self.host = options['host']
-
-            context = None
-            ## If no context was chosen, explain our default to the user:
-            if not (options['ssl2'] or options['ssl3'] or options['tls1']):
-                try: raise SSLContextError('NO_CONTEXT')
-                except SSLContextError as sce: log.err(sce.message)
-            else:
-                ## If incompatible contexts were chosen, inform the user:
-                if options['tls1'] and (options['ssl2'] or options['ssl3']):
-                    try: raise SSLContextError('INCOMPATIBLE')
-                    except SSLContextError as sce: log.err(sce.message)
-                    finally: log.msg('Defaulting to testing only TLSv1.')
-                elif options['ssl2']:
-                    try:
-                        if not options['ssl3']:
-                            context = SSL.Context(SSL.SSLv2_METHOD)
-                        else:
-                            context = SSL.Context(SSL.SSLv23_METHOD)
-                    except ValueError as ve:
-                        log.err(ve.message)
-                        try: raise SSLContextError('MISSING_SSLV2')
-                        except SSLContextError as sce:
-                            log.err(sce.message)
-                            log.msg("Falling back to testing only TLSv1.")
-                            context = SSL.Context(SSL.TLSv1_METHOD)
-                elif options['ssl3']:
-                    context = SSL.Context(SSL.SSLv3_METHOD)
-            ## finally, reset the context if the user's choice was okay:
-            if context: self.context = context
-
-            ## if we weren't given a file with a list of ciphersuites to use,
-            ## then use the firefox default list:
-            if not options['ciphersuite']:
-                self.ciphers = firefox_ciphers
-                log.msg('Using default Firefox ciphersuite list.')
-            else:
-                if os.path.isfile(options['ciphersuite']):
-                    log.msg('Using ciphersuite list from "%s"'
-                            % options['ciphersuite'])
-                    with open(options['ciphersuite']) as cipherfile:
-                        for line in cipherfile.readlines():
-                            self.ciphers.append(line.strip())
-            self.ciphersuite = ":".join(self.ciphers)
-
-        if getattr(config.advanced, 'default_timeout', None) is not None:
-            self.timeout = config.advanced.default_timeout
-        else:
-            self.timeout = 30   ## default the timeout to 30 seconds
-
-        ## xxx For debugging, set the socket timeout higher anyway:
-        self.timeout = 30
-
-        ## We have to set the default timeout on our sockets before creation:
-        socket.setdefaulttimeout(self.timeout)
-
-    def splitInput(self, input):
-        addr, port = input.strip().rsplit(':', 1)
-        if self.localOptions['port']:
-            port = self.localOptions['port']
-        return (str(addr), int(port))
-
-    def inputProcessor(self, file=None):
-        if self.host:
-            yield self.splitInput(self.host)
-        if os.path.isfile(file):
-            with open(file) as fh:
-                for line in fh.readlines():
-                    if line.startswith('#'):
-                        continue
-                    yield self.splitInput(line)
-
-    def buildSocket(self, addr):
-        global s
-        ip = ipaddr.IPAddress(addr) ## learn if we're IPv4 or IPv6
-        if ip.version == 4:
-            s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
-        elif ip.version == 6:
-            s = socket.socket(socket.AF_INET6, socket.SOCK_STREAM)
-        return s
-
-    def getContext(self):
-        self.context.set_cipher_list(self.ciphersuite)
-        return self.context
-
-    @staticmethod
-    def getPeerCert(connection, get_chain=False):
-        """Get the PEM-encoded certificate or cert chain of the remote host.
-
-        :param connection: A :class:`OpenSSL.SSL.Connection <Connection>`.
-        :param bool get_chain: If True, get all the certificates in the
-            chain. Otherwise, only get the remote host's certificate.
-        :returns: A PEM-encoded x509 certificate. If
-            :param:`getPeerCert.get_chain <get_chain>` is True, returns a list
-            of PEM-encoded x509 certificates.
-        """
-        if not get_chain:
-            x509_cert = connection.get_peer_certificate()
-            pem_cert = crypto.dump_certificate(crypto.FILETYPE_PEM, x509_cert)
-            return pem_cert
-        else:
-            cert_chain = []
-            x509_cert_chain = connection.get_peer_cert_chain()
-            for x509_cert in x509_cert_chain:
-                pem_cert = crypto.dump_certificate(crypto.FILETYPE_PEM,
-                                                   x509_cert)
-                cert_chain.append(pem_cert)
-            return cert_chain
-
-    @staticmethod
-    def getX509Name(certificate, get_components=False):
-        """Get the DER-encoded form of the Name fields of an X509 certificate.
-
-        @param certificate: A :class:`OpenSSL.crypto.X509Name` object.
-        @param get_components: A boolean. If True, returns a list of tuples of
-                               the (name, value)s of each Name field in the
-                               :param:`certificate`. If False, returns the DER
-                               encoded form of the Name fields of the
-                               :param:`certificate`.
-        """
-        x509_name = None
-
-        try:
-            assert isinstance(certificate, crypto.X509Name), \
-                "getX509Name takes OpenSSL.crypto.X509Name as first argument!"
-            x509_name = crypto.X509Name(certificate)
-        except AssertionError as ae:
-            log.err(ae)
-        except Exception as exc:
-            log.exception(exc)
-
-        if not x509_name is None:
-            if not get_components:
-                return x509_name.der()
-            else:
-                return x509_name.get_components()
-        else:
-            log.debug("getX509Name: got None for ivar x509_name")
-
-    @staticmethod
-    def getPublicKey(key):
-        """Get the PEM-encoded format of a host certificate's public key.
-
-        :param key: A :class:`OpenSSL.crypto.PKey <crypto.PKey>` object.
-        """
-        try:
-            assert isinstance(key, crypto.PKey), \
-                "getPublicKey expects type OpenSSL.crypto.PKey for parameter key"
-        except AssertionError as ae:
-            log.err(ae)
-        else:
-            pubkey = crypto.dump_privatekey(crypto.FILETYPE_PEM, key)
-            return pubkey
-
-    def test_handshake(self):
-        """xxx fill me in"""
-
-        def makeConnection(host):
-            """Create a socket to the remote host's IP address, then get the
-            TLS/SSL context method and ciphersuite list. Lastly, initiate a
-            connection to the host.
-
-            :param tuple host: A tuple of the remote host's IP address as a
-                string, and an integer specifying the remote host port, i.e.
-                ('1.1.1.1',443)
-            :raises: :exc:`ConnectionTimeout` if the socket timed out.
-            :returns: A :class:`OpenSSL.SSL.Connection <Connection>`.
-            """
-            addr, port = host
-            sckt = self.buildSocket(addr)
-            context = self.getContext()
-            connection = SSL.Connection(context, sckt)
-            try:
-               connection.connect(host)
-            except socket_timeout as stmo:
-               error = ConnectionTimeout(stmo.message)
-               return failure.Failure(error)
-            else:
-               return connection
-
-        def connectionFailed(connection, host):
-            """Handle errors raised while attempting to create the socket and
-            :class:`OpenSSL.SSL.Connection <Connection>`, and setting the
-            TLS/SSL context.
-
-            :type connection: :exc:Exception
-            :param connection: The exception that was raised in
-                :func:`HandshakeTest.test_handshake.makeConnection
-                <makeConnection>`.
-            :param tuple host: A tuple of the host IP address as a string, and
-                an int specifying the host port, i.e. ('1.1.1.1', 443)
-            :rtype: :exc:Exception
-            :returns: The original exception.
-            """
-            addr, port = host
-
-            if not isinstance(connection, SSL.Connection):
-                if isinstance(connection, IOError):
-                    ## On some *nix distros, /dev/random is 0600 root:root and
-                    ## we get a permissions error when trying to read
-                    if "[Errno 13]" in connection.message:
-                        raise InsufficientPrivileges(
-                            "%s" % connection.message.split("[Errno 13]", 1)[1])
-                elif isinstance(connection, socket_error):
-                    if "[Errno 101]" in connection.message:
-                        raise HostUnreachable(
-                            "Host unreachable: %s:%s" % (addr, port))
-                elif isinstance(connection, Exception):
-                    log.debug("connectionFailed: got Exception:")
-                    log.err("Connection failed with reason: %s"
-                            % connection.message)
-                else:
-                    log.err("Connection failed with reason: %s" % str(connection))
-
-            self.report['host'] = addr
-            self.report['port'] = port
-            self.report['state'] = 'CONNECTION_FAILED'
-
-            return connection
-
-        def connectionSucceeded(connection, host, timeout):
-            """If we have created a connection, set the socket options, and log
-            the connection state and peer name.
-
-            :param connection: A :class:`OpenSSL.SSL.Connection <Connection>`.
-            :param tuple host: A tuple of the remote host's IP address as a
-                string, and an integer specifying the remote host port, i.e.
-                ('1.1.1.1',443)
-            """
-
-            ## xxx TODO to get this to work with a non-blocking socket, see how
-            ##     twisted.internet.tcp.Client handles socket objects.
-            connection.setblocking(1)
-
-            ## Set the timeout on the connection:
-            ##
-            ## We want to set SO_RCVTIMEO and SO_SNDTIMEO, which both are
-            ## defined in the socket option definitions in <sys/socket.h>, and
-            ## which both take as their value, according to socket(7), a
-            ## struct timeval, which is defined in the libc manual:
-            ## https://www.gnu.org/software/libc/manual/html_node/Elapsed-Time.html
-            timeval = struct.pack('ll', int(timeout), 0)
-            connection.setsockopt(socket.SOL_SOCKET, socket.SO_RCVTIMEO, timeval)
-            connection.setsockopt(socket.SOL_SOCKET, socket.SO_SNDTIMEO, timeval)
-
-            ## Set the connection state to client mode:
-            connection.set_connect_state()
-
-            peer_name, peer_port = connection.getpeername()
-            if peer_name:
-                log.msg("Connected to %s" % peer_name)
-            else:
-                log.debug("Couldn't get peer name from connection: %s" % host)
-                log.msg("Connected to %s" % host)
-            log.debug("Connection state: %s " % connection.state_string())
-
-            return connection
-
-        def connectionRenegotiate(connection, host, error_message):
-            """Handle a server-initiated SSL/TLS handshake renegotiation.
-
-            :param connection: A :class:`OpenSSL.SSL.Connection <Connection>`.
-            :param tuple host: A tuple of the remote host's IP address as a
-                string, and an integer specifying the remote host port, i.e.
-                ('1.1.1.1',443)
-            """
-
-            log.msg("Server requested renegotiation from: %s" % host)
-            log.debug("Renegotiation reason: %s" % error_message)
-            log.debug("State: %s" % connection.state_string())
-
-            if connection.renegotiate():
-                log.debug("Renegotiation possible.")
-                log.msg("Retrying handshake with %s..." % host)
-                try:
-                    connection.do_handshake()
-                    while connection.renegotiate_pending():
-                        log.msg("Renegotiation with %s in progress..." % host)
-                        log.debug("State: %s" % connection.state_string())
-                        sleep(1)
-                    else:
-                        log.msg("Renegotiation with %s complete!" % host)
-                except SSL.WantReadError, wre:
-                    connection = handleWantRead(connection)
-                    log.debug("State: %s" % connection.state_string())
-                except SSL.WantWriteError, wwe:
-                    connection = handleWantWrite(connection)
-                    log.debug("State: %s" % connection.state_string())
-            return connection
-
-        def connectionShutdown(connection, host):
-            """Handle shutting down a :class:`OpenSSL.SSL.Connection
-            <Connection>`, including correct handling of halfway shutdown
-            connections.
-
-            Calls to :meth:`OpenSSL.SSL.Connection.shutdown
-            <Connection.shutdown()>` return a boolean value -- if the
-            connection is already shutdown, it returns True, else it returns
-            false. Thus we loop through a block which detects if the connection
-            is an a partial shutdown state and corrects that if that is the
-            case, else it waits for one second, then attempts shutting down the
-            connection again.
-
-            Detection of a partial shutdown state is done through
-            :meth:`OpenSSL.SSL.Connection.get_shutdown
-            <Connection.get_shutdown()>` which queries OpenSSL for a bitvector
-            of the server and client shutdown states. For example, the binary
-            string '0b00' is an open connection, and '0b10' is a partially
-            closed connection that has been shutdown on the serverside.
-
-            :param connection: A :class:`OpenSSL.SSL.Connection <Connection>`.
-            :param tuple host: A tuple of the remote host's IP address as a
-                string, and an integer specifying the remote host port, i.e.
-                ('1.1.1.1',443)
-            """
-
-            peername, peerport = host
-
-            if isinstance(connection, SSL.Connection):
-                log.msg("Closing connection to %s:%d..." % (peername, peerport))
-                while not connection.shutdown():
-                    ## if the connection is halfway shutdown, we have to
-                    ## wait for a ZeroReturnError on connection.recv():
-                    if connection.get_shutdown() in (SSL.SENT_SHUTDOWN,
-                                                     SSL.RECEIVED_SHUTDOWN):
-                        try:
-                            _read_buffer = connection.pending()
-                            connection.recv(_read_buffer)
-                        except SSL.ZeroReturnError, zre: continue
-                    else:
-                        sleep(1)
-                else:
-                    log.msg("Closed connection to %s:%d"
-                            % (peername, peerport))
-            elif isinstance(connection, types.NoneType):
-                log.debug("connectionShutdown: got NoneType for connection")
-                return
-            else:
-                log.debug("connectionShutdown: expected connection, got %r"
-                          % connection.__repr__())
-
-            return connection
-
-        def handleWantRead(connection):
-            """From OpenSSL memory BIO documentation on ssl_read():
-
-                If the underlying BIO is blocking, SSL_read() will only
-                return, once the read operation has been finished or an error
-                occurred, except when a renegotiation take place, in which
-                case a SSL_ERROR_WANT_READ may occur. This behaviour can be
-                controlled with the SSL_MODE_AUTO_RETRY flag of the
-                SSL_CTX_set_mode(3) call.
-
-                If the underlying BIO is non-blocking, SSL_read() will also
-                return when the underlying BIO could not satisfy the needs of
-                SSL_read() to continue the operation. In this case a call to
-                SSL_get_error(3) with the return value of SSL_read() will
-                yield SSL_ERROR_WANT_READ or SSL_ERROR_WANT_WRITE. As at any
-                time a re-negotiation is possible, a call to SSL_read() can
-                also cause write operations!  The calling process then must
-                repeat the call after taking appropriate action to satisfy the
-                needs of SSL_read(). The action depends on the underlying
-                BIO. When using a non-blocking socket, nothing is to be done,
-                but select() can be used to check for the required condition.
-
-            And from the OpenSSL memory BIO documentation on ssl_get_error():
-
-                SSL_ERROR_WANT_READ, SSL_ERROR_WANT_WRITE
-
-                The operation did not complete; the same TLS/SSL I/O function
-                should be called again later. If, by then, the underlying BIO
-                has data available for reading (if the result code is
-                SSL_ERROR_WANT_READ) or allows writing data
-                (SSL_ERROR_WANT_WRITE), then some TLS/SSL protocol progress
-                will take place, i.e. at least part of an TLS/SSL record will
-                be read or written. Note that the retry may again lead to a
-                SSL_ERROR_WANT_READ or SSL_ERROR_WANT_WRITE condition. There
-                is no fixed upper limit for the number of iterations that may
-                be necessary until progress becomes visible at application
-                protocol level.
-
-                For socket BIOs (e.g. when SSL_set_fd() was used), select() or
-                poll() on the underlying socket can be used to find out when
-                the TLS/SSL I/O function should be retried.
-
-                Caveat: Any TLS/SSL I/O function can lead to either of
-                SSL_ERROR_WANT_READ and SSL_ERROR_WANT_WRITE. In particular,
-                SSL_read() or SSL_peek() may want to write data and
-                SSL_write() may want to read data. This is mainly because
-                TLS/SSL handshakes may occur at any time during the protocol
-                (initiated by either the client or the server); SSL_read(),
-                SSL_peek(), and SSL_write() will handle any pending
-                handshakes.
-
-            Also, see http://stackoverflow.com/q/3952104
-            """
-            try:
-                while connection.want_read():
-                    self.state = connection.state_string()
-                    log.debug("Connection to %s HAS want_read" % host)
-                    _read_buffer = connection.pending()
-                    log.debug("Rereading %d bytes..." % _read_buffer)
-                    sleep(1)
-                    rereceived = connection.recv(int(_read_buffer))
-                    log.debug("Received %d bytes" % rereceived)
-                    log.debug("State: %s" % connection.state_string())
-                else:
-                    self.state = connection.state_string()
-                    peername, peerport = connection.getpeername()
-                    log.debug("Connection to %s:%s DOES NOT HAVE want_read"
-                              % (peername, peerport))
-                    log.debug("State: %s" % connection.state_string())
-            except SSL.WantWriteError, wwe:
-                self.state = connection.state_string()
-                log.debug("Got WantWriteError while handling want_read")
-                log.debug("WantWriteError: %s" % wwe.message)
-                log.debug("Switching to handleWantWrite()...")
-                handleWantWrite(connection)
-            return connection
-
-        def handleWantWrite(connection):
-            """See :func:HandshakeTest.test_hanshake.handleWantRead """
-            try:
-                while connection.want_write():
-                    self.state = connection.state_string()
-                    log.debug("Connection to %s HAS want_write" % host)
-                    sleep(1)
-                    resent = connection.send("o\r\n")
-                    log.debug("Sent: %d" % resent)
-                    log.debug("State: %s" % connection.state_string())
-            except SSL.WantReadError, wre:
-                self.state = connection.state_string()
-                log.debug("Got WantReadError while handling want_write")
-                log.debug("WantReadError: %s" % wre.message)
-                log.debug("Switching to handleWantRead()...")
-                handleWantRead(connection)
-            return connection
-
-        def doHandshake(connection):
-            """Attempt a TLS/SSL handshake with the host.
-
-            If, after the first attempt at handshaking, OpenSSL's memory BIO
-            state machine does not report success, then try reading and
-            writing from the connection, and handle any SSL_ERROR_WANT_READ or
-            SSL_ERROR_WANT_WRITE which occurs.
-
-            If multiple want_reads occur, then try renegotiation with the
-            host and start over. If multiple want_writes occur, the connection
-            has probably timed out, so move on to the connectionShutdown step.
-
-            :param connection: A :class:`OpenSSL.SSL.Connection <Connection>`.
-            :ivar peername: The host IP address, as reported by
-                :meth:`Connection.getpeername <connection.getpeername()>`.
-            :ivar peerport: The host port, reported by
-                :meth:`Connection.getpeername <connection.getpeername()>`.
-            :ivar int sent: The number of bytes sent to the remote host.
-            :ivar int received: The number of bytes received from the remote
-                                host.
-            :ivar int _read_buffer: The max bytes that can be read from the
-                                    connection.
-            :returns: The :param:`doHandshake.connection <connection>` with
-                      handshake completed, else the unhandled error that was
-                      raised.
-            """
-            peername, peerport = connection.getpeername()
-
-            try:
-                log.msg("Attempting handshake: %s" % peername)
-                connection.do_handshake()
-            except OpenSSL.SSL.WantReadError as wre:
-                self.state = connection.state_string()
-                log.debug("Handshake state: %s" % self.state)
-                log.debug("doHandshake: WantReadError on first handshake attempt.")
-                connection = handleWantRead(connection)
-            except OpenSSL.SSL.WantWriteError as wwe:
-                self.state = connection.state_string()
-                log.debug("Handshake state: %s" % self.state)
-                log.debug("doHandshake: WantWriteError on first handshake attempt.")
-                connection = handleWantWrite(connection)
-            else:
-                self.state = connection.state_string()
-
-            if self.state == 'SSL negotiation finished successfully':
-                ## jump to handshakeSuccessful and get certchain
-                return connection
-            else:
-                sent = connection.send("o\r\n")
-                self.state = connection.state_string()
-                log.debug("Handshake state: %s" % self.state)
-                log.debug("Transmitted %d bytes" % sent)
-
-                _read_buffer = connection.pending()
-                log.debug("Max bytes in receive buffer: %d" % _read_buffer)
-
-                try:
-                    received = connection.recv(int(_read_buffer))
-                except SSL.WantReadError, wre:
-                    if connection.want_read():
-                        self.state = connection.state_string()
-                        connection = handleWantRead(connection)
-                    else:
-                        ## if we still have an SSL_ERROR_WANT_READ, then try to
-                        ## renegotiate
-                        self.state = connection.state_string()
-                        connection = connectionRenegotiate(connection,
-                                                           connection.getpeername(),
-                                                           wre.message)
-                except SSL.WantWriteError, wwe:
-                    self.state = connection.state_string()
-                    log.debug("Handshake state: %s" % self.state)
-                    if connection.want_write():
-                        connection = handleWantWrite(connection)
-                    else:
-                        raise ConnectionTimeout("Connection to %s:%d timed out."
-                                                % (peername, peerport))
-                else:
-                    log.msg("Received: %s" % received)
-                    self.state = connection.state_string()
-                    log.debug("Handshake state: %s" % self.state)
-
-            return connection
-
-        def handshakeSucceeded(connection):
-            """Get the details from the server certificate, cert chain, and
-            server ciphersuite list, and put them in our report.
-
-            WARNING: do *not* do this:
-            >>> server_cert.get_pubkey()
-                <OpenSSL.crypto.PKey at 0x4985d28>
-            >>> pk = server_cert.get_pubkey()
-            >>> pk.check()
-                Segmentation fault
-
-            :param connection: A :class:`OpenSSL.SSL.Connection <Connection>`.
-            :returns: :param:`handshakeSucceeded.connection <connection>`.
-            """
-            host, port = connection.getpeername()
-            log.msg("Handshake with %s:%d successful!" % (host, port))
-
-            server_cert = self.getPeerCert(connection)
-            server_cert_chain = self.getPeerCert(connection, get_chain=True)
-
-            renegotiations = connection.total_renegotiations()
-            cipher_list    = connection.get_cipher_list()
-            session_key    = connection.master_key()
-            rawcert        = connection.get_peer_certificate()
-            ## xxx TODO this hash needs to be formatted as SHA1, not long
-            cert_subj_hash = rawcert.subject_name_hash()
-            cert_serial    = rawcert.get_serial_number()
-            cert_sig_algo  = rawcert.get_signature_algorithm()
-            cert_subject   = self.getX509Name(rawcert.get_subject(),
-                                              get_components=True)
-            cert_issuer    = self.getX509Name(rawcert.get_issuer(),
-                                              get_components=True)
-            cert_pubkey    = self.getPublicKey(rawcert.get_pubkey())
-
-            self.report['host'] = host
-            self.report['port'] = port
-            self.report['state'] = self.state
-            self.report['renegotiations'] = renegotiations
-            self.report['server_cert'] = server_cert
-            self.report['server_cert_chain'] = \
-                ''.join([cert for cert in server_cert_chain])
-            self.report['server_ciphersuite'] = cipher_list
-            self.report['cert_subject'] = cert_subject
-            self.report['cert_subj_hash'] = cert_subj_hash
-            self.report['cert_issuer'] = cert_issuer
-            self.report['cert_public_key'] = cert_pubkey
-            self.report['cert_serial_no'] = cert_serial
-            self.report['cert_sig_algo'] = cert_sig_algo
-            ## The session's master key is only valid for that session, and
-            ## will allow us to decrypt any packet captures (if they were
-            ## collected). Because we are not requesting URLs, only host:port
-            ## (which would be visible in pcaps anyway, since the FQDN is
-            ## never encrypted) I do not see a way for this to log any user or
-            ## identifying information. Correct me if I'm wrong.
-            self.report['session_key'] = session_key
-
-            log.msg("Server certificate:\n\n%s" % server_cert)
-            log.msg("Server certificate chain:\n\n%s"
-                    % ''.join([cert for cert in server_cert_chain]))
-            log.msg("Negotiated ciphersuite:\n%s"
-                    % '\n\t'.join([cipher for cipher in cipher_list]))
-            log.msg("Certificate subject: %s" % cert_subject)
-            log.msg("Certificate subject hash: %d" % cert_subj_hash)
-            log.msg("Certificate issuer: %s" % cert_issuer)
-            log.msg("Certificate public key:\n\n%s" % cert_pubkey)
-            log.msg("Certificate signature algorithm: %s" % cert_sig_algo)
-            log.msg("Certificate serial number: %s" % cert_serial)
-            log.msg("Total renegotiations: %d" % renegotiations)
-
-            return connection
-
-        def handshakeFailed(connection, host):
-            """Handle a failed handshake attempt and report the failure reason.
-
-            :type connection: :class:`twisted.python.failure.Failure <Failure>`
-                or :exc:Exception
-            :param connection: The failed connection.
-            :param tuple host: A tuple of the remote host's IP address as a
-                string, and an integer specifying the remote host port, i.e.
-                ('1.1.1.1',443)
-            :returns: None
-            """
-            addr, port = host
-            log.msg("Handshake with %s:%d failed!" % host)
-
-            self.report['host'] = addr
-            self.report['port'] = port
-
-            if isinstance(connection, Exception) \
-                    or isinstance(connection, ConnectionTimeout):
-                log.msg("Handshake failed with reason: %s" % connection.message)
-                self.report['state'] = connection.message
-            elif isinstance(connection, failure.Failure):
-                log.msg("Handshake failed with reason: Socket %s"
-                        % connection.getErrorMessage())
-                self.report['state'] = connection.getErrorMessage()
-                ctmo = connection.trap(ConnectionTimeout)
-                if ctmo == ConnectionTimeout:
-                    connection.cleanFailure()
-            else:
-                log.msg("Handshake failed with reason: %s" % str(connection))
-                if not 'state' in self.report.keys():
-                    self.report['state'] = str(connection)
-
-            return None
-
-        def deferMakeConnection(host):
-            return threads.deferToThread(makeConnection, host)
-
-        if self.host and not self.input:
-            self.input = self.splitInput(self.host)
-        log.msg("Beginning handshake test for %s:%s" % self.input)
-
-        connection = deferMakeConnection(self.input)
-        connection.addCallbacks(connectionSucceeded, connectionFailed,
-                                callbackArgs=[self.input, self.timeout],
-                                errbackArgs=[self.input])
-
-        handshake = defer.Deferred()
-        handshake.addCallback(doHandshake)
-        handshake.addCallbacks(handshakeSucceeded, handshakeFailed,
-                               errbackArgs=[self.input])
-
-        connection.chainDeferred(handshake)
-        connection.addCallbacks(connectionShutdown, defer.passthru,
-                                callbackArgs=[self.input])
-        connection.addBoth(log.exception)
-
-        return connection
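
The control flow above (a blocking connection pushed onto a thread with threads.deferToThread, followed by paired callbacks/errbacks for the handshake) can be hard to follow out of context. Below is a minimal, self-contained sketch of the same Twisted pattern; make_connection and do_handshake are hypothetical stand-ins for the test's blocking socket/TLS steps, and defer.maybeDeferred stands in for deferToThread so the sketch runs without a reactor.

    from twisted.internet import defer

    def make_connection(host):
        # Stand-in for the blocking socket/TLS setup the real test defers to a thread.
        return "connected to %s:%d" % host

    def do_handshake(connection):
        # Stand-in for the blocking ClientHello/ServerHello exchange.
        return connection + " (handshake completed)"

    def handshake_succeeded(result):
        print("success: %s" % result)
        return result

    def handshake_failed(failure, host):
        # Mirrors handshakeFailed(): log and record the reason, swallow the error.
        print("failure for %s:%d: %s" % (host[0], host[1], failure.getErrorMessage()))
        return None

    host = ('127.0.0.1', 443)
    d = defer.maybeDeferred(make_connection, host)
    d.addCallback(do_handshake)
    d.addCallbacks(handshake_succeeded, handshake_failed, errbackArgs=[host])
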
diff --git a/data/nettests/manipulation/__init__.py b/data/nettests/manipulation/__init__.py
deleted file mode 100644
index e69de29..0000000
diff --git a/data/nettests/manipulation/captiveportal.py b/data/nettests/manipulation/captiveportal.py
deleted file mode 100644
index a0f8c6b..0000000
--- a/data/nettests/manipulation/captiveportal.py
+++ /dev/null
@@ -1,650 +0,0 @@
-# -*- coding: utf-8 -*-
-# captiveportal
-# *************
-#
-# This test is a collection of tests to detect the presence of a
-# captive portal. Code is taken, in part, from the old ooni-probe,
-# which was written by Jacob Appelbaum and Arturo Filastò.
-#
-# This module performs multiple tests that match specific vendor captive
-# portal tests. This is a basic internet captive portal filter tester written
-# for RECon 2011.
-#
-# Read the following URLs to understand the captive portal detection process
-# for various vendors:
-#
-# http://technet.microsoft.com/en-us/library/cc766017%28WS.10%29.aspx
-# http://blog.superuser.com/2011/05/16/windows-7-network-awareness/
-# http://isc.sans.org/diary.html?storyid=10312&
-# http://src.chromium.org/viewvc/chrome?view=rev&revision=74608
-# http://code.google.com/p/chromium-os/issues/detail?3281ttp,
-# http://crbug.com/52489
-# http://crbug.com/71736
-# https://bugzilla.mozilla.org/show_bug.cgi?id=562917
-# https://bugzilla.mozilla.org/show_bug.cgi?id=603505
-# http://lists.w3.org/Archives/Public/ietf-http-wg/2011JanMar/0086.html
-# http://tools.ietf.org/html/draft-nottingham-http-portal-02
-#
-# :authors: Jacob Appelbaum, Arturo Filastò, Isis Lovecruft
-# :license: see LICENSE for more details
-
-import base64
-import os
-import random
-import re
-import string
-import urllib2
-from urlparse import urlparse
-
-from twisted.python import usage
-from twisted.internet import defer, threads
-
-from ooni import nettest
-from ooni.templates import httpt
-from ooni.utils import net
-from ooni.utils import log
-
-try:
-    from dns import resolver
-except ImportError:
-    print "The dnspython module was not found:"
-    print "See https://crate.io/packages/dnspython/";
-    resolver = None
-
-__plugoo__ = "captiveportal"
-__desc__ = "Captive portal detection test"
-
-class UsageOptions(usage.Options):
-    optParameters = [['asset', 'a', None, 'Asset file'],
-                 ['experiment-url', 'e', 'http://google.com/', 'Experiment URL'],
-                 ['user-agent', 'u', random.choice(net.userAgents),
-                  'User agent for HTTP requests']
-                ]
-
-class CaptivePortal(nettest.NetTestCase):
-    """
-    Compares content and status codes of HTTP responses, and attempts
-    to determine if content has been altered.
-    """
-
-    name = "captivep"
-    description = "Captive Portal Test"
-    version = '0.2'
-    author = "Isis Lovecruft"
-    usageOptions = UsageOptions
-
-    def http_fetch(self, url, headers={}):
-        """
-        Parses an HTTP url, fetches it, and returns a urllib2 response
-        object.
-        """
-        url = urlparse(url).geturl()
-        request = urllib2.Request(url, None, headers)
-        #XXX: HTTP Error 302: The HTTP server returned a redirect error that
-        #would lead to an infinite loop.  The last 30x error message was: Found
-        try:
-            response = urllib2.urlopen(request)
-            response_headers = dict(response.headers)
-            return response, response_headers
-        except urllib2.HTTPError, e:
-            log.err("HTTPError: %s" % e)
-            return None, None
-
-    def http_content_match_fuzzy_opt(self, experimental_url, control_result,
-                                     headers=None, fuzzy=False):
-        """
-        Makes an HTTP request on port 80 for experimental_url, then
-        compares the response_content of experimental_url with the
-        control_result. Optionally, if the fuzzy parameter is set to
-        True, the response_content is compared with a regex of the
-        control_result. If the response_content from the
-        experimental_url and the control_result match, returns True
-        with the HTTP status code and headers; otherwise returns False
-        with the status code and headers.
-        """
-
-        if headers is None:
-            default_ua = self.local_options['user-agent']
-            headers = {'User-Agent': default_ua}
-
-        response, response_headers = self.http_fetch(experimental_url, headers)
-
-        response_content = response.read() if response else None
-        response_code = response.code if response else None
-        if response_content is None:
-            log.err("HTTP connection appears to have failed.")
-            return False, False, False
-
-        if fuzzy:
-            pattern = re.compile(control_result)
-            match = pattern.search(response_content)
-            log.msg("Fuzzy HTTP content comparison for experiment URL")
-            log.msg("'%s'" % experimental_url)
-            if not match:
-                log.msg("does not match!")
-                return False, response_code, response_headers
-            else:
-                log.msg("and the expected control result yielded a match.")
-                return True, response_code, response_headers
-        else:
-            if str(response_content) != str(control_result):
-                log.msg("HTTP content comparison of experiment URL")
-                log.msg("'%s'" % experimental_url)
-                log.msg("and the expected control result do not match.")
-                return False, response_code, response_headers
-            else:
-                return True, response_code, response_headers
-
-    def http_status_code_match(self, experiment_code, control_code):
-        """
-        Compare two HTTP status codes, returns True if they match.
-        """
-        return int(experiment_code) == int(control_code)
-
-    def http_status_code_no_match(self, experiment_code, control_code):
-        """
-        Compare two HTTP status codes, returns True if they do not match.
-        """
-        return int(experiment_code) != int(control_code)
-
-    def dns_resolve(self, hostname, nameserver=None):
-        """
-        Resolves hostname(s) through nameserver to corresponding
-        address(es). hostname may be either a single hostname string,
-        or a list of strings. If nameserver is not given, use local
-        DNS resolver, and if that fails try using 8.8.8.8.
-        """
-        if not resolver:
-            log.msg("dnspython is not installed.\
-                    Cannot perform DNS Resolve test")
-            return []
-        if isinstance(hostname, str):
-            hostname = [hostname]
-
-        if nameserver is not None:
-            res = resolver.Resolver(configure=False)
-            res.nameservers = [nameserver]
-        else:
-            res = resolver.Resolver()
-
-        response = []
-        answer = None
-
-        for hn in hostname:
-            try:
-                answer = res.query(hn)
-            except resolver.NoNameservers:
-                res.nameservers = ['8.8.8.8']
-                try:
-                    answer = res.query(hn)
-                except resolver.NXDOMAIN:
-                    log.msg("DNS resolution for %s returned NXDOMAIN" % hn)
-                    response.append('NXDOMAIN')
-            except resolver.NXDOMAIN:
-                log.msg("DNS resolution for %s returned NXDOMAIN" % hn)
-                response.append('NXDOMAIN')
-            finally:
-                if not answer:
-                    return response
-                for addr in answer:
-                    response.append(addr.address)
-        return response
-
-    def dns_resolve_match(self, experiment_hostname, control_address):
-        """
-        Resolve experiment_hostname, and check to see that it returns
-        an experiment_address which matches the control_address.  If
-        they match, returns True and experiment_address; otherwise
-        returns False and experiment_address.
-        """
-        experiment_address = self.dns_resolve(experiment_hostname)
-        if not experiment_address:
-            log.debug("dns_resolve() for %s failed" % experiment_hostname)
-            return None, experiment_address
-
-        if len(set(experiment_address) & set([control_address])) > 0:
-            return True, experiment_address
-        else:
-            log.msg("DNS comparison of control '%s' does not" % control_address)
-            log.msg("match experiment response '%s'" % experiment_address)
-            return False, experiment_address
-
-    def get_auth_nameservers(self, hostname):
-        """
-        Many CPs set a nameserver to be used. Let's query that
-        nameserver for the authoritative nameservers of hostname.
-
-        The equivalent of:
-        $ dig +short NS ooni.nu
-        """
-        if not resolver:
-            log.msg("dnspython not installed.")
-            log.msg("Cannot perform test.")
-            return []
-
-        res = resolver.Resolver()
-        answer = res.query(hostname, 'NS')
-        auth_nameservers = []
-        for auth in answer:
-            auth_nameservers.append(auth.to_text())
-        return auth_nameservers
-
-    def hostname_to_0x20(self, hostname):
-        """
-        MaKEs yOur HOsTnaME lOoK LiKE THis.
-
-        For more information, see:
-        D. Dagon, et al. "Increased DNS Forgery Resistance
-        Through 0x20-Bit Encoding". Proc. CCS, 2008.
-        """
-        hostname_0x20 = ''
-        for char in hostname:
-            l33t = random.choice(['caps', 'nocaps'])
-            if l33t == 'caps':
-                hostname_0x20 += char.capitalize()
-            else:
-                hostname_0x20 += char.lower()
-        return hostname_0x20
-
-    def check_0x20_to_auth_ns(self, hostname, sample_size=None):
-        """
-        Resolve a 0x20 DNS request for hostname over hostname's
-        authoritative nameserver(s), and check to make sure that
-        the capitalization in the 0x20 request matches that of the
-        response. Also, check the serial numbers of the SOA (Start
-        of Authority) records on the authoritative nameservers to
-        make sure that they match.
-
-        If sample_size is given, a random sample equal to that number
-        of authoritative nameservers will be queried; default is 5.
-        """
-        log.msg("")
-        log.msg("Testing random capitalization of DNS queries...")
-        log.msg("Testing that Start of Authority serial numbers match...")
-
-        auth_nameservers = self.get_auth_nameservers(hostname)
-
-        if sample_size is None:
-            sample_size = 5
-        resolved_auth_ns = random.sample(self.dns_resolve(auth_nameservers),
-                                         sample_size)
-
-        querynames = []
-        answernames = []
-        serials = []
-
-        # Even when gevent monkey patching is on, the requests here
-        # are sent without being 0x20'd, so we need to 0x20 them.
-        hostname = self.hostname_to_0x20(hostname)
-
-        for auth_ns in resolved_auth_ns:
-            res = resolver.Resolver(configure=False)
-            res.nameservers = [auth_ns]
-            try:
-                answer = res.query(hostname, 'SOA')
-            except resolver.Timeout:
-                continue
-            querynames.append(answer.qname.to_text())
-            answernames.append(answer.rrset.name.to_text())
-            for soa in answer:
-                serials.append(str(soa.serial))
-
-        if len(set(querynames).intersection(answernames)) == 1:
-            log.msg("Capitalization in DNS queries and responses match.")
-            name_match = True
-        else:
-            log.msg("The random capitalization '%s' used in" % hostname)
-            log.msg("DNS queries to that hostname's authoritative")
-            log.msg("nameservers does not match the capitalization in")
-            log.msg("the response.")
-            name_match = False
-
-        if len(set(serials)) == 1:
-            log.msg("Start of Authority serial numbers all match.")
-            serial_match = True
-        else:
-            log.msg("Some SOA serial numbers did not match the rest!")
-            serial_match = False
-
-        ret = name_match, serial_match, querynames, answernames, serials
-
-        if name_match and serial_match:
-            log.msg("Your DNS queries do not appear to be tampered.")
-            return ret
-        elif name_match or serial_match:
-            log.msg("Something is tampering with your DNS queries.")
-            return ret
-        elif not name_match and not serial_match:
-            log.msg("Your DNS queries are definitely being tampered with.")
-            return ret
-
-    def get_random_url_safe_string(self, length):
-        """
-        Returns a random url-safe string of specified length, where
-        0 < length <= 256. The returned string will always start with
-        an alphabetic character.
-        """
-        if (length <= 0):
-            length = 1
-        elif (length > 256):
-            length = 256
-
-        random_ascii = base64.urlsafe_b64encode(os.urandom(int(length)))
-
-        while not random_ascii[:1].isalpha():
-            random_ascii = base64.urlsafe_b64encode(os.urandom(int(length)))
-
-        three_quarters = int((len(random_ascii)) * (3.0/4.0))
-        random_string = random_ascii[:three_quarters]
-        return random_string
-
-    def get_random_hostname(self, length=None):
-        """
-        Returns a random hostname with SLD of specified length. If
-        length is unspecified, length=32 is used.
-
-        These *should* all resolve to NXDOMAIN. If they actually
-        resolve to a box that isn't part of a captive portal that
-        would be rather interesting.
-        """
-        if length is None:
-            length = 32
-
-        random_sld = self.get_random_url_safe_string(length)
-
-        # if it doesn't start with a letter, chuck it.
-        while not random_sld[:1].isalpha():
-            random_sld = self.get_random_url_safe_string(length)
-
-        tld_list = ['.com', '.net', '.org', '.info', '.test', '.invalid']
-        random_tld = random.choice(tld_list)
-        random_hostname = random_sld + random_tld
-        return random_hostname
-
-    def compare_random_hostnames(self, hostname_count=None, hostname_length=None):
-        """
-        Get hostname_count number of random hostnames with SLD length
-        of hostname_length, and then attempt DNS resolution. If no
-        arguments are given, default to three hostnames of 32 bytes
-        each. These random hostnames *should* resolve to NXDOMAIN,
-        except in the case where a user is presented with a captive
-        portal and remains unauthenticated, in which case the captive
-        portal may return the address of the authentication page.
-
-        If the cardinality of the intersection of the set of resolved
-        random hostnames and the single element control set
-        (['NXDOMAIN']) is equal to one, then DNS resolved properly.
-
-        Returns True if only NXDOMAINs were returned; otherwise returns
-        False with the relative complement of the control set in the
-        response set.
-        """
-        if hostname_count is None:
-            hostname_count = 3
-
-        log.msg("Generating random hostnames...")
-        log.msg("Resolving DNS for %d random hostnames..." % hostname_count)
-
-        control = ['NXDOMAIN']
-        responses = []
-
-        for x in range(hostname_count):
-            random_hostname = self.get_random_hostname(hostname_length)
-            response_match, response_address = self.dns_resolve_match(random_hostname,
-                                                                      control[0])
-            for address in response_address:
-                if response_match is False:
-                    log.msg("Strangely, DNS resolution of the random hostname")
-                    log.msg("%s actually points to %s"
-                             % (random_hostname, response_address))
-                    responses = responses + [address]
-                else:
-                    responses = responses + [address]
-
-        intersection = set(responses) & set(control)
-        relative_complement = set(responses) - set(control)
-        r = set(responses)
-
-        if (len(intersection) == 1) and (len(r) == 1):
-            log.msg("All %d random hostnames properly resolved to NXDOMAIN."
-                     % hostname_count)
-            return True, relative_complement
-        elif (len(intersection) == 1) and (len(r) > 1):
-            log.msg("Something odd happened. Some random hostnames correctly")
-            log.msg("resolved to NXDOMAIN, but several others resolved to")
-            log.msg("to the following addresses: %s" % relative_complement)
-            return False, relative_complement
-        elif (len(intersection) == 0) and (len(r) == 1):
-            log.msg("All random hostnames resolved to the IP address ")
-            log.msg("'%s', which is indicative of a captive portal." % r)
-            return False, relative_complement
-        else:
-            log.debug("Apparently, pigs are flying on your network, 'cause a")
-            log.debug("bunch of hostnames made from 32-byte random strings")
-            log.debug("just magically resolved to a bunch of random addresses.")
-            log.debug("That is definitely highly improbable. In fact, my napkin")
-            log.debug("tells me that the probability of just one of those")
-            log.debug("hostnames resolving to an address is 1.68e-59, making")
-            log.debug("it nearly twice as unlikely as an MD5 hash collision.")
-            log.debug("Either someone is seriously messing with your network,")
-            log.debug("or else you are witnessing the impossible. %s" % r)
-            return False, relative_complement
-
-    def google_dns_cp_test(self):
-        """
-        Google Chrome resolves three 10-byte random hostnames.
-        """
-        subtest = "Google Chrome DNS-based"
-        log.msg("Running the Google Chrome DNS-based captive portal test...")
-
-        gmatch, google_dns_result = self.compare_random_hostnames(3, 10)
-
-        if gmatch:
-            log.msg("Google Chrome DNS-based captive portal test did not")
-            log.msg("detect a captive portal.")
-            return google_dns_result
-        else:
-            log.msg("Google Chrome DNS-based captive portal test believes")
-            log.msg("you are in a captive portal, or else something very")
-            log.msg("odd is happening with your DNS.")
-            return google_dns_result
-
-    def ms_dns_cp_test(self):
-        """
-        Microsoft "phones home" to a server which will always resolve
-        to the same address.
-        """
-        subtest = "Microsoft NCSI DNS-based"
-
-        log.msg("")
-        log.msg("Running the Microsoft NCSI DNS-based captive portal")
-        log.msg("test...")
-
-        msmatch, ms_dns_result = self.dns_resolve_match("dns.msftncsi.com",
-                                                        "131.107.255.255")
-        if msmatch:
-            log.msg("Microsoft NCSI DNS-based captive portal test did not")
-            log.msg("detect a captive portal.")
-            return ms_dns_result
-        else:
-            log.msg("Microsoft NCSI DNS-based captive portal test ")
-            log.msg("believes you are in a captive portal.")
-            return ms_dns_result
-
-    def run_vendor_dns_tests(self):
-        """
-        Run the vendor DNS tests.
-        """
-        report = {}
-        report['google_dns_cp'] = self.google_dns_cp_test()
-        report['ms_dns_cp'] = self.ms_dns_cp_test()
-
-        return report
-
-    def run_vendor_tests(self, *a, **kw):
-        """
-        These are several vendor tests used to detect the presence of
-        a captive portal. Each test compares HTTP status code and
-        content to the control results and has its own User-Agent
-        string, in order to emulate the test as it would occur on the
-        device it was intended for. Vendor tests are defined in the
-        format:
-        [exp_url, ctrl_result, ctrl_code, ua, test_name]
-        """
-
-        vendor_tests = [['http://www.apple.com/library/test/success.html',
-                         'Success',
-                         '200',
-                         'Mozilla/5.0 (iPhone; U; CPU like Mac OS X; en) AppleWebKit/420+ (KHTML, like Gecko) Version/3.0 Mobile/1A543a Safari/419.3',
-                         'Apple HTTP Captive Portal'],
-                        ['http://tools.ietf.org/html/draft-nottingham-http-portal-02',
-                         '428 Network Authentication Required',
-                         '428',
-                         'Mozilla/5.0 (Windows NT 6.1; rv:5.0) Gecko/20100101 Firefox/5.0',
-                         'W3 Captive Portal'],
-                        ['http://www.msftncsi.com/ncsi.txt',
-                         'Microsoft NCSI',
-                         '200',
-                         'Microsoft NCSI',
-                         'MS HTTP Captive Portal',]]
-
-        cm = self.http_content_match_fuzzy_opt
-        sm = self.http_status_code_match
-        snm = self.http_status_code_no_match
-
-        def compare_content(status_func, fuzzy, experiment_url, control_result,
-                            control_code, headers, test_name):
-            log.msg("")
-            log.msg("Running the %s test..." % test_name)
-
-            content_match, experiment_code, experiment_headers = cm(experiment_url,
-                                                                    control_result,
-                                                                    headers, fuzzy)
-            status_match = status_func(experiment_code, control_code)
-
-            if status_match and content_match:
-                log.msg("The %s test was unable to detect" % test_name)
-                log.msg("a captive portal.")
-                return True
-            else:
-                log.msg("The %s test shows that your network" % test_name)
-                log.msg("is filtered.")
-                return False
-
-        result = []
-        for vt in vendor_tests:
-            report = {}
-            report['vt'] = vt
-
-            experiment_url = vt[0]
-            control_result = vt[1]
-            control_code = vt[2]
-            headers = {'User-Agent': vt[3]}
-            test_name = vt[4]
-
-            args = (experiment_url, control_result, control_code, headers, test_name)
-
-            if test_name == "MS HTTP Captive Portal":
-                report['result'] = compare_content(sm, False, *args)
-
-            elif test_name == "Apple HTTP Captive Portal":
-                report['result'] = compare_content(sm, True, *args)
-
-            elif test_name == "W3 Captive Portal":
-                report['result'] = compare_content(snm, True, *args)
-
-            else:
-                log.err("Ooni is trying to run an undefined CP vendor test.")
-            result.append(report)
-        return result
-
-    def control(self, experiment_result, args):
-        """
-        Compares the content and status code of the HTTP response for
-        experiment_url with the control_result and control_code
-        respectively. If the status codes match, but the experimental
-        content and control_result do not match, fuzzy matching is enabled
-        to determine if the control_result is at least included somewhere
-        in the experimental content. Returns True if matches are found,
-        and False if otherwise.
-        """
-        # XXX put this back to being parametrized
-        #experiment_url = self.local_options['experiment-url']
-        experiment_url = 'http://google.com/'
-        control_result = 'XX'
-        control_code = 200
-        ua = self.local_options['user-agent']
-
-        cm = self.http_content_match_fuzzy_opt
-        sm = self.http_status_code_match
-        snm = self.http_status_code_no_match
-
-        log.msg("Running test for '%s'..." % experiment_url)
-        content_match, experiment_code, experiment_headers = cm(experiment_url,
-                                                                control_result)
-        status_match = sm(experiment_code, control_code)
-        if status_match and content_match:
-            log.msg("The test for '%s'" % experiment_url)
-            log.msg("was unable to detect a captive portal.")
-
-            self.report['result'] = True
-
-        elif status_match and not content_match:
-            log.msg("Retrying '%s' with fuzzy match enabled."
-                     % experiment_url)
-            fuzzy_match, experiment_code, experiment_headers = cm(experiment_url,
-                                                                  control_result,
-                                                                  fuzzy=True)
-            if fuzzy_match:
-                self.report['result'] = True
-            else:
-                log.msg("Found modified content on '%s'," % experiment_url)
-                log.msg("which could indicate a captive portal.")
-
-                self.report['result'] = False
-        else:
-            log.msg("The content comparison test for ")
-            log.msg("'%s'" % experiment_url)
-            log.msg("shows that your HTTP traffic is filtered.")
-
-            self.report['result'] = False
-
-    @defer.inlineCallbacks
-    def test_captive_portal(self):
-        """
-        Runs the CaptivePortal(Test).
-
-        CONFIG OPTIONS
-        --------------
-
-        If "do_captive_portal_vendor_tests" is set to "true", then vendor
-        specific captive portal HTTP-based tests will be run.
-
-        If "do_captive_portal_dns_tests" is set to "true", then vendor
-        specific captive portal DNS-based tests will be run.
-
-        If "check_dns_requests" is set to "true", then Ooni-probe will
-        attempt to check that your DNS requests are not being tampered with
-        by a captive portal.
-
-        If "captive_portal" = "yourfilename.txt", then user-specified tests
-        will be run.
-
-        Any combination of the above tests can be run.
-        """
-
-        log.msg("")
-        log.msg("Running vendor tests...")
-        self.report['vendor_tests'] = yield threads.deferToThread(self.run_vendor_tests)
-
-        log.msg("")
-        log.msg("Running vendor DNS-based tests...")
-        self.report['vendor_dns_tests'] = yield threads.deferToThread(self.run_vendor_dns_tests)
-
-        log.msg("")
-        log.msg("Checking that DNS requests are not being tampered...")
-        self.report['check0x20'] = yield threads.deferToThread(self.check_0x20_to_auth_ns, 'ooni.nu')
-
-        log.msg("")
-        log.msg("Captive portal test finished!")
-
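
Two of the building blocks above, 0x20 capitalization of hostnames and generation of random hostnames that should resolve to NXDOMAIN, are easy to demonstrate in isolation. The following sketch uses only the standard library; the TLD list and lengths are illustrative, not the exact values used by the test.

    import base64
    import os
    import random

    def to_0x20(hostname):
        # Randomly flip the case of each character (cf. hostname_to_0x20).
        return ''.join(random.choice((c.upper(), c.lower())) for c in hostname)

    def random_hostname(length=32, tlds=('.com', '.net', '.org', '.invalid')):
        # URL-safe random label that starts with a letter, plus a random TLD
        # (cf. get_random_hostname); regenerate until the first char is alphabetic.
        label = ''
        while not label[:1].isalpha():
            raw = base64.urlsafe_b64encode(os.urandom(length)).decode('ascii')
            label = raw.rstrip('=').replace('-', '').replace('_', '')[:length]
        return label + random.choice(tlds)

    print(to_0x20("ooni.nu"))    # e.g. "oOnI.nU"
    print(random_hostname(10))   # should resolve to NXDOMAIN on a clean network
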
diff --git a/data/nettests/manipulation/daphne.py b/data/nettests/manipulation/daphne.py
deleted file mode 100644
index 09279fa..0000000
--- a/data/nettests/manipulation/daphne.py
+++ /dev/null
@@ -1,119 +0,0 @@
-# -*- encoding: utf-8 -*-
-from twisted.python import usage
-from twisted.internet import protocol, endpoints, reactor
-
-from ooni import nettest
-from ooni.kit import daphn3
-from ooni.utils import log
-
-class Daphn3ClientProtocol(daphn3.Daphn3Protocol):
-    def nextStep(self):
-        log.debug("Moving on to next step in the state walk")
-        self.current_data_received = 0
-        if self.current_step >= (len(self.steps) - 1):
-            log.msg("Reached the end of the state machine")
-            log.msg("Censorship fingerpint bisected!")
-            step_idx, mutation_idx = self.factory.mutation
-            log.msg("step_idx: %s | mutation_id: %s" % (step_idx, mutation_idx))
-            #self.transport.loseConnection()
-            if self.report:
-                self.report['mutation_idx'] = mutation_idx
-                self.report['step_idx'] = step_idx
-            self.d.callback(None)
-            return
-        else:
-            self.current_step += 1
-        if self._current_step_role() == self.role:
-            # We need to send more data because we are again responsible for
-            # doing so.
-            self.sendPayload()
-
-
-class Daphn3ClientFactory(protocol.ClientFactory):
-    protocol = daphn3.Daphn3Protocol
-    mutation = [0,0]
-    steps = None
-
-    def buildProtocol(self, addr):
-        p = self.protocol()
-        p.steps = self.steps
-        p.factory = self
-        return p
-
-    def startedConnecting(self, connector):
-        log.msg("Started connecting %s" % connector)
-
-    def clientConnectionFailed(self, reason, connector):
-        log.err("We failed connecting the the OONIB")
-        log.err("Cannot perform test. Perhaps it got blocked?")
-        log.err("Please report this to tor-assistants@xxxxxxxxxxxxxx")
-
-    def clientConnectionLost(self, reason, connector):
-        log.err("Daphn3 client connection lost")
-        print reason
-
-class daphn3Args(usage.Options):
-    optParameters = [
-                     ['host', 'h', '127.0.0.1', 'Target Hostname'],
-                     ['port', 'p', 57003, 'Target port number']]
-
-    optFlags = [['pcap', 'c', 'Specify that the input file is a pcap file'],
-                ['yaml', 'y', 'Specify that the input file is a YAML file (default)']]
-
-class daphn3Test(nettest.NetTestCase):
-
-    name = "Daphn3"
-    usageOptions = daphn3Args
-    inputFile = ['file', 'f', None, 
-            'Specify the pcap or YAML file to be used as input to the test']
-
-    #requiredOptions = ['file']
-
-    steps = None
-
-    def inputProcessor(self, filename):
-        """
-        step_idx is the step in the packet exchange
-        ex.
-        [.X.] are packets sent by a client or a server
-
-            client:  [.1.]        [.3.] [.4.]
-            server:         [.2.]             [.5.]
-
-        mutation_idx: the index of the byte, within the packet at step_idx,
-        that is to be mutated
-
-        """
-        if self.localOptions['pcap']:
-            daphn3Steps = daphn3.read_pcap(filename)
-        else:
-            daphn3Steps = daphn3.read_yaml(filename)
-        log.debug("Loaded these steps %s" % daphn3Steps)
-        yield daphn3Steps
-
-    def test_daphn3(self):
-        host = self.localOptions['host']
-        port = int(self.localOptions['port'])
-
-        def failure(failure):
-            log.msg("Failed to connect")
-            self.report['censored'] = True
-            self.report['mutation'] = 0
-            raise Exception("Error in connection, perhaps the backend is censored")
-            return
-
-        def success(protocol):
-            log.msg("Successfully connected")
-            protocol.sendPayload()
-            return protocol.d
-
-        log.msg("Connecting to %s:%s" % (host, port))
-        endpoint = endpoints.TCP4ClientEndpoint(reactor, host, port)
-        daphn3_factory = Daphn3ClientFactory()
-        daphn3_factory.steps = self.input
-        daphn3_factory.report = self.report
-        d = endpoint.connect(daphn3_factory)
-        d.addErrback(failure)
-        d.addCallback(success)
-        return d
-
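
For context, the test above follows the standard Twisted endpoint pattern: build a ClientFactory, connect it through a TCP4ClientEndpoint, and attach success/failure callbacks to the resulting Deferred. Here is a stripped-down, hypothetical example of that pattern; it assumes something is listening on 127.0.0.1:57003 (the test's default port) and is not the daphn3 protocol itself.

    from twisted.internet import endpoints, protocol, reactor

    class PingOnce(protocol.Protocol):
        # Minimal protocol standing in for Daphn3ClientProtocol.
        def connectionMade(self):
            self.transport.write(b"ping\n")

        def dataReceived(self, data):
            print("received: %r" % (data,))
            self.transport.loseConnection()
            reactor.stop()

    def connected(proto):
        print("connected to %s" % (proto.transport.getPeer(),))
        return proto

    def failed(failure):
        print("connection failed: %s" % failure.getErrorMessage())
        reactor.stop()

    factory = protocol.ClientFactory()
    factory.protocol = PingOnce
    endpoint = endpoints.TCP4ClientEndpoint(reactor, '127.0.0.1', 57003)
    endpoint.connect(factory).addCallbacks(connected, failed)
    reactor.run()
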
diff --git a/data/nettests/manipulation/dnsspoof.py b/data/nettests/manipulation/dnsspoof.py
deleted file mode 100644
index c9120a4..0000000
--- a/data/nettests/manipulation/dnsspoof.py
+++ /dev/null
@@ -1,70 +0,0 @@
-from twisted.internet import defer
-from twisted.python import usage
-
-from scapy.all import IP, UDP, DNS, DNSQR
-
-from ooni.templates import scapyt
-from ooni.utils import log
-
-class UsageOptions(usage.Options):
-    optParameters = [['resolver', 'r', None,
-                    'Specify the resolver that should be used for DNS queries (ip:port)'],
-                    ['hostname', 'h', None,
-                        'Specify the hostname of a censored site'],
-                    ['backend', 'b', '8.8.8.8:53',
-                        'Specify the IP address of a good DNS resolver (ip:port)']
-                    ]
-
-
-class DNSSpoof(scapyt.ScapyTest):
-    name = "DNS Spoof"
-    timeout = 2
-
-    usageOptions = UsageOptions
-
-    requiredTestHelpers = {'backend': 'dns'}
-    requiredOptions = ['hostname', 'resolver']
-
-    def setUp(self):
-        self.resolverAddr, self.resolverPort = self.localOptions['resolver'].split(':')
-        self.resolverPort = int(self.resolverPort)
-
-        self.controlResolverAddr, self.controlResolverPort = self.localOptions['backend'].split(':')
-        self.controlResolverPort = int(self.controlResolverPort)
-
-        self.hostname = self.localOptions['hostname']
-
-    def postProcessor(self, report):
-        """
-        This is not tested, but the concept is that if the two responses
-        match up then spoofing is occurring.
-        """
-        try:
-            test_answer = report['test_a_lookup']['answered_packets'][0][1]
-            control_answer = report['test_control_a_lookup']['answered_packets'][0][1]
-        except IndexError:
-            self.report['spoofing'] = 'no_answer'
-            return
-
-        if test_answer[UDP] == control_answer[UDP]:
-            self.report['spoofing'] = True
-        else:
-            self.report['spoofing'] = False
-        return
-
-    @defer.inlineCallbacks
-    def test_a_lookup(self):
-        question = IP(dst=self.resolverAddr)/UDP()/DNS(rd=1,
-                qd=DNSQR(qtype="A", qclass="IN", qname=self.hostname))
-        log.msg("Performing query to %s with %s:%s" % (self.hostname, self.resolverAddr, self.resolverPort))
-        yield self.sr1(question)
-
-    @defer.inlineCallbacks
-    def test_control_a_lookup(self):
-        question = IP(dst=self.controlResolverAddr)/UDP()/DNS(rd=1,
-                qd=DNSQR(qtype="A", qclass="IN", qname=self.hostname))
-        log.msg("Performing query to %s with %s:%s" % (self.hostname,
-            self.controlResolverAddr, self.controlResolverPort))
-        yield self.sr1(question)
-
-
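
The essence of the test above is building the same A query twice, once toward the resolver under test and once toward a known-good resolver, and then comparing the answers' UDP layers. A small sketch of the packet construction and comparison follows; the resolver addresses are placeholders, and actually sending the packets (which needs raw-socket privileges) is omitted.

    from scapy.all import IP, UDP, DNS, DNSQR

    def build_a_query(resolver_ip, hostname):
        # Same packet shape as test_a_lookup(): IP/UDP/DNS with one A question.
        return IP(dst=resolver_ip) / UDP() / DNS(
            rd=1, qd=DNSQR(qtype="A", qclass="IN", qname=hostname))

    def looks_spoofed(test_answer, control_answer):
        # Mirrors postProcessor(): identical UDP payloads from two different
        # resolvers suggest an injected/spoofed response.
        return test_answer[UDP] == control_answer[UDP]

    q1 = build_a_query("10.0.0.1", "example.com")   # hypothetical resolver under test
    q2 = build_a_query("8.8.8.8", "example.com")    # control resolver
    print(q1.summary())
    print(q2.summary())
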
diff --git a/data/nettests/manipulation/http_header_field_manipulation.py b/data/nettests/manipulation/http_header_field_manipulation.py
deleted file mode 100644
index 3423442..0000000
--- a/data/nettests/manipulation/http_header_field_manipulation.py
+++ /dev/null
@@ -1,190 +0,0 @@
-# -*- encoding: utf-8 -*-
-#
-# :authors: Arturo Filastò
-# :licence: see LICENSE
-
-import random
-import json
-import yaml
-
-from twisted.python import usage
-
-from ooni.utils import log, net, randomStr
-from ooni.templates import httpt
-from ooni.utils.txagentwithsocks import TrueHeaders
-
-def random_capitalization(string):
-    output = ""
-    original_string = string
-    string = string.swapcase()
-    for i in range(len(string)):
-        if random.randint(0, 1):
-            output += string[i].swapcase()
-        else:
-            output += string[i]
-    if original_string == output:
-        return random_capitalization(output)
-    else:
-        return output
-
-class UsageOptions(usage.Options):
-    optParameters = [
-            ['backend', 'b', 'http://127.0.0.1:57001', 
-                'URL of the backend to use for sending the requests'],
-            ['headers', 'h', None,
-                'Specify a yaml formatted file from which to read the request headers to send']
-            ]
-
-class HTTPHeaderFieldManipulation(httpt.HTTPTest):
-    """
-    It performs HTTP requests with request headers that vary capitalization
-    towards a backend. If the headers reported by the server differ from
-    the ones we sent, then we have detected tampering.
-    """
-    name = "HTTP Header Field Manipulation"
-    author = "Arturo Filastò"
-    version = "0.1.3"
-
-    randomizeUA = False
-    usageOptions = UsageOptions
-
-    requiredTestHelpers = {'backend': 'http-return-json-headers'}
-    requiredOptions = ['backend']
-
-    def get_headers(self):
-        headers = {}
-        if self.localOptions['headers']:
-            try:
-                f = open(self.localOptions['headers'])
-            except IOError:
-                raise Exception("Specified input file does not exist")
-            content = ''.join(f.readlines())
-            f.close()
-            headers = yaml.safe_load(content)
-            return headers
-        else:
-            # XXX generate these from a random choice taken from whatheaders.com
-            # http://s3.amazonaws.com/data.whatheaders.com/whatheaders-latest.xml.zip
-            headers = {"User-Agent": [random.choice(net.userAgents)],
-                "Accept": ["text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8"],
-                "Accept-Encoding": ["gzip,deflate,sdch"],
-                "Accept-Language": ["en-US,en;q=0.8"],
-                "Accept-Charset": ["ISO-8859-1,utf-8;q=0.7,*;q=0.3"],
-                "Host": [randomStr(15)+'.com']
-            }
-            return headers
-
-    def get_random_caps_headers(self):
-        headers = {}
-        normal_headers = self.get_headers()
-        for k, v in normal_headers.items():
-            new_key = random_capitalization(k)
-            headers[new_key] = v
-        return headers
-
-    def processInputs(self):
-        if self.localOptions['backend']:
-            self.url = self.localOptions['backend']
-        else:
-            raise Exception("No backend specified")
-
-    def processResponseBody(self, data):
-        self.check_for_tampering(data)
-
-    def check_for_tampering(self, data):
-        """
-        Here we do checks to verify if the request we made has been tampered
-        with. We have 3 categories of tampering:
-
-        *  **total** when the response is not a json object and therefore we were not
-        able to reach the ooniprobe test backend
-
-        *  **request_line_capitalization** when the HTTP Request line (e.g. GET /
-        HTTP/1.1) does not match the capitalization we set.
-
-        *  **header_field_number** when the number of headers we sent does not match
-        with the ones the backend received
-
-        *  **header_name_capitalization** when the header field names do not match
-        those that we sent.
-
-        *  **header_field_value** when the header field value does not match with the
-        one we transmitted.
-        """
-        log.msg("Checking for tampering on %s" % self.url)
-
-        self.report['tampering'] = {
-            'total': False,
-            'request_line_capitalization': False,
-            'header_name_capitalization': False,
-            'header_field_value': False,
-            'header_field_number': False
-        }
-        try:
-            response = json.loads(data)
-        except ValueError:
-            self.report['tampering']['total'] = True
-            return
-
-        request_request_line = "%s / HTTP/1.1" % self.request_method
-
-        try:
-            response_request_line = response['request_line']
-            response_headers_dict = response['headers_dict']
-        except KeyError:
-            self.report['tampering']['total'] = True
-            return
-
-        if request_request_line != response_request_line:
-            self.report['tampering']['request_line_capitalization'] = True
-
-        request_headers = TrueHeaders(self.request_headers)
-        diff = request_headers.getDiff(TrueHeaders(response_headers_dict),
-                ignore=['Connection'])
-        if diff:
-            self.report['tampering']['header_name_capitalization'] = True
-        else:
-            self.report['tampering']['header_name_capitalization'] = False
-        self.report['tampering']['header_name_diff'] = list(diff)
-        log.msg("    total: %(total)s" % self.report['tampering'])
-        log.msg("    request_line_capitalization: %(request_line_capitalization)s" % self.report['tampering'])
-        log.msg("    header_name_capitalization: %(header_name_capitalization)s" % self.report['tampering'])
-        log.msg("    header_field_value: %(header_field_value)s" % self.report['tampering'])
-        log.msg("    header_field_number: %(header_field_number)s" % self.report['tampering'])
-
-    def test_get(self):
-        self.request_method = "GET"
-        self.request_headers = self.get_random_caps_headers()
-        return self.doRequest(self.url, self.request_method,
-                headers=self.request_headers)
-
-    def test_get_random_capitalization(self):
-        self.request_method = random_capitalization("GET")
-        self.request_headers = self.get_random_caps_headers()
-        return self.doRequest(self.url, self.request_method,
-                headers=self.request_headers)
-
-    def test_post(self):
-        self.request_method = "POST"
-        self.request_headers = self.get_headers()
-        return self.doRequest(self.url, self.request_method,
-                headers=self.request_headers)
-
-    def test_post_random_capitalization(self):
-        self.request_method = random_capitalization("POST")
-        self.request_headers = self.get_random_caps_headers()
-        return self.doRequest(self.url, self.request_method,
-                headers=self.request_headers)
-
-    def test_put(self):
-        self.request_method = "PUT"
-        self.request_headers = self.get_headers()
-        return self.doRequest(self.url, self.request_method,
-                headers=self.request_headers)
-
-    def test_put_random_capitalization(self):
-        self.request_method = random_capitalization("PUT")
-        self.request_headers = self.get_random_caps_headers()
-        return self.doRequest(self.url, self.request_method,
-                headers=self.request_headers)
-
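
The detection logic above boils down to: send headers whose names have randomized capitalization, then compare (case-sensitively) what a header-echoing backend reports against what was sent. A simplified, self-contained sketch follows; unlike random_capitalization() above it does not retry to guarantee that the output differs from the input, and the echoed headers are simulated rather than fetched from a real http-return-json-headers helper.

    import random

    def random_caps(name):
        # Flip each character's case at random (simplified random_capitalization).
        return ''.join(random.choice((c.upper(), c.lower())) for c in name)

    base_headers = {'Accept': ['*/*'], 'Host': ['example.com']}
    sent_headers = dict((random_caps(k), v) for k, v in base_headers.items())

    # Pretend the backend echoed the headers back unchanged; a transparent proxy
    # that normalizes header names would make the key sets differ.
    received_headers = dict(sent_headers)
    tampering = set(sent_headers) != set(received_headers)
    print(sent_headers)
    print("header_name_capitalization tampering: %s" % tampering)
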
diff --git a/data/nettests/manipulation/http_host.py b/data/nettests/manipulation/http_host.py
deleted file mode 100644
index 2ec517c..0000000
--- a/data/nettests/manipulation/http_host.py
+++ /dev/null
@@ -1,152 +0,0 @@
-# -*- encoding: utf-8 -*-
-#
-# HTTP Host Test
-# **************
-#
-# :authors: Arturo Filastò
-# :licence: see LICENSE
-
-import json
-from twisted.python import usage
-
-from ooni.utils import randomStr, randomSTR
-
-from ooni.utils import log
-from ooni.templates import httpt
-
-class UsageOptions(usage.Options):
-    optParameters = [['backend', 'b', 'http://127.0.0.1:57001',
-                      'URL of the test backend to use. Should be \
-                              listening on port 80 and be a \
-                              HTTPReturnJSONHeadersHelper'],
-                     ['content', 'c', None, 'The file to read \
-                            from containing the content of a block page']]
-
-class HTTPHost(httpt.HTTPTest):
-    """
-    This test is aimed at detecting the presence of a transparent HTTP proxy
-    and enumerating the sites that are being censored by it.
-
-    It places inside of the Host header field the hostname of the site that is
-    to be tested for censorship and then determines if the probe is behind a
-    transparent HTTP proxy (because the response from the backend server does
-    not match) and if the site is censored, by checking if the page that it
-    got back matches the input block page.
-    """
-    name = "HTTP Host"
-    author = "Arturo Filastò"
-    version = "0.2.3"
-
-    randomizeUA = False
-    usageOptions = UsageOptions
-
-    inputFile = ['file', 'f', None,
-            'List of hostnames to test for censorship']
-
-    requiredTestHelpers = {'backend': 'http-return-json-headers'}
-    requiredOptions = ['backend']
-
-    def test_filtering_prepend_newline_to_method(self):
-        headers = {}
-        headers["Host"] = [self.input]
-        return self.doRequest(self.localOptions['backend'], method="\nGET",
-                headers=headers)
-
-    def test_filtering_add_tab_to_host(self):
-        headers = {}
-        headers["Host"] = [self.input + '\t']
-        return self.doRequest(self.localOptions['backend'],
-                headers=headers)
-
-    def test_filtering_of_subdomain(self):
-        headers = {}
-        headers["Host"] = [randomStr(10) + '.' + self.input]
-        return self.doRequest(self.localOptions['backend'],
-                headers=headers)
-
-    def test_filtering_via_fuzzy_matching(self):
-        headers = {}
-        headers["Host"] = [randomStr(10) + self.input + randomStr(10)]
-        return self.doRequest(self.localOptions['backend'],
-                headers=headers)
-
-    def test_send_host_header(self):
-        """
-        Stuffs the HTTP Host header field with the site to be tested for
-        censorship and does an HTTP request of this kind to our backend.
-
-        We randomize the HTTP User Agent headers.
-        """
-        headers = {}
-        headers["Host"] = [self.input]
-        return self.doRequest(self.localOptions['backend'],
-                headers=headers)
-
-    def check_for_censorship(self, body):
-        """
-        If we have specified what a censorship page looks like here we will
-        check if the page we are looking at matches it.
-
-        XXX this is not tested, though it is basically what was used to detect
-        censorship in the Palestine case.
-        """
-        if self.localOptions['content']:
-            self.report['censored'] = True
-            censorship_page = open(self.localOptions['content'])
-            response_page = iter(body.split("\n"))
-
-            for censorship_line in censorship_page.xreadlines():
-                response_line = response_page.next()
-                if response_line != censorship_line:
-                    self.report['censored'] = False
-                    break
-
-            censorship_page.close()
-        else:
-            self.report['censored'] = None
-
-    def processResponseBody(self, body):
-        """
-        XXX this is to be filled in with either a domclass-based classifier or
-        with a rule that will allow us to detect that the body of the result is
-        that of a censored site.
-        """
-        # If we don't see a JSON object we know that something is wrong for
-        # sure
-        if not body.startswith("{"):
-            log.msg("This does not appear to be JSON")
-            self.report['transparent_http_proxy'] = True
-            self.check_for_censorship(body)
-            return
-        try:
-            content = json.loads(body)
-        except:
-            log.msg("The json does not parse, this is not what we expected")
-            self.report['transparent_http_proxy'] = True
-            self.check_for_censorship(body)
-            return
-
-        # We base the determination of the presence of a transparent HTTP
-        # proxy on the basis of the response containing the json that is to be
-        # returned by a HTTP Request Test Helper
-        if 'request_headers' in content and \
-                'request_line' in content and \
-                'headers_dict' in content:
-            log.msg("Found the keys I expected in %s" % content)
-            self.report['transparent_http_proxy'] = False
-            self.report['censored'] = False
-        else:
-            log.msg("Did not find the keys I expected in %s" % content)
-            self.report['transparent_http_proxy'] = True
-            self.check_for_censorship(body)
-
-    def inputProcessor(self, filename=None):
-        """
-        This inputProcessor extracts domain names from urls
-        """
-        if filename:
-            fp = open(filename)
-            for x in fp.readlines():
-                yield x.strip().split('//')[-1].split('/')[0]
-            fp.close()
-        else: pass
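
The inputProcessor() above strips each input URL down to its hostname with string splitting; the same result can be obtained with urlparse, which may be easier to read. A small illustrative sketch (Python 2 import shown, since that is what this codebase targets):

    from urlparse import urlparse  # urllib.parse.urlparse on Python 3

    def hostname_from_line(line):
        # Keep only the host part, whether or not the line carries a scheme
        # or a path (cf. inputProcessor above).
        url = line.strip()
        parsed = urlparse(url if '//' in url else '//' + url)
        return parsed.netloc.split(':')[0]

    for line in ["http://example.com/some/page", "example.org", "https://example.net:8080/"]:
        print(hostname_from_line(line))
    # -> example.com, example.org, example.net
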
diff --git a/data/nettests/manipulation/http_invalid_request_line.py b/data/nettests/manipulation/http_invalid_request_line.py
deleted file mode 100644
index 64dbcac..0000000
--- a/data/nettests/manipulation/http_invalid_request_line.py
+++ /dev/null
@@ -1,108 +0,0 @@
-# -*- encoding: utf-8 -*-
-from twisted.python import usage
-
-from ooni.utils import log
-from ooni.utils import randomStr, randomSTR
-from ooni.templates import tcpt
-
-class UsageOptions(usage.Options):
-    optParameters = [['backend', 'b', '127.0.0.1',
-                        'The OONI backend that runs a TCP echo server'],
-                    ['backendport', 'p', 80, 'Specify the port that the TCP echo server is running (should only be set for debugging)']]
-
-class HTTPInvalidRequestLine(tcpt.TCPTest):
-    """
-    The goal of this test is to do some very basic and not very noisy fuzzing
-    on the HTTP request line. We generate a series of requests that are not
-    valid HTTP requests.
-
-    Unless stated otherwise, 'Xx'*N refers to N*2 random upper- or lowercase
-    ASCII letters or digits ('XxXx' would be 4 characters).
-    """
-    name = "HTTP Invalid Request Line"
-    version = "0.2"
-    authors = "Arturo Filastò"
-
-    usageOptions = UsageOptions
-
-    requiredTestHelpers = {'backend': 'tcp-echo'}
-    requiredOptions = ['backend']
-
-    def setUp(self):
-        self.port = int(self.localOptions['backendport'])
-        self.address = self.localOptions['backend']
-
-    def check_for_manipulation(self, response, payload):
-        log.debug("Checking if %s == %s" % (response, payload))
-        if response != payload:
-            self.report['tampering'] = True
-        else:
-            self.report['tampering'] = False
-
-    def test_random_invalid_method(self):
-        """
-        We test sending data to a TCP echo server listening on port 80; if what
-        we get back is not what we have sent, then there is tampering going on.
-        This is for example what squid will return when performing such
-        request:
-
-            HTTP/1.0 400 Bad Request
-            Server: squid/2.6.STABLE21
-            Date: Sat, 23 Jul 2011 02:22:44 GMT
-            Content-Type: text/html
-            Content-Length: 1178
-            Expires: Sat, 23 Jul 2011 02:22:44 GMT
-            X-Squid-Error: ERR_INVALID_REQ 0
-            X-Cache: MISS from cache_server
-            X-Cache-Lookup: NONE from cache_server:3128
-            Via: 1.0 cache_server:3128 (squid/2.6.STABLE21)
-            Proxy-Connection: close
-
-        """
-        payload = randomSTR(4) + " / HTTP/1.1\n\r"
-
-        d = self.sendPayload(payload)
-        d.addCallback(self.check_for_manipulation, payload)
-        return d
-
-    def test_random_invalid_field_count(self):
-        """
-        This generates a request that looks like this:
-
-        XxXxX XxXxX XxXxX XxXxX
-
-        This may trigger some bugs in the HTTP parsers of transparent HTTP
-        proxies.
-        """
-        payload = ' '.join(randomStr(5) for x in range(4))
-        payload += "\n\r"
-
-        d = self.sendPayload(payload)
-        d.addCallback(self.check_for_manipulation, payload)
-        return d
-
-    def test_random_big_request_method(self):
-        """
-        This generates a request that looks like this:
-
-        Xx*512 / HTTP/1.1
-        """
-        payload = randomStr(1024) + ' / HTTP/1.1\n\r'
-
-        d = self.sendPayload(payload)
-        d.addCallback(self.check_for_manipulation, payload)
-        return d
-
-    def test_random_invalid_version_number(self):
-        """
-        This generates a request that looks like this:
-
-        GET / HTTP/XxX
-        """
-        payload = 'GET / HTTP/' + randomStr(3)
-        payload += '\n\r'
-
-        d = self.sendPayload(payload)
-        d.addCallback(self.check_for_manipulation, payload)
-        return d
-
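
To make the fuzzing strategy above concrete, here is a minimal sketch that builds the same three kinds of malformed request lines and applies the same echo comparison. random_str is a hypothetical stand-in for ooni.utils.randomStr, and the "response" is simply the payload fed back, since a real run needs a tcp-echo test helper.

    import random
    import string

    def random_str(length):
        # Hypothetical stand-in for ooni.utils.randomStr: letters and digits.
        chars = string.ascii_letters + string.digits
        return ''.join(random.choice(chars) for _ in range(length))

    def tampering_detected(response, payload):
        # Same check as check_for_manipulation(): an intact echo means no tampering.
        return response != payload

    payloads = [
        random_str(4).upper() + " / HTTP/1.1\n\r",           # random invalid method
        ' '.join(random_str(5) for _ in range(4)) + "\n\r",   # invalid field count
        "GET / HTTP/" + random_str(3) + "\n\r",               # invalid version number
    ]
    for payload in payloads:
        print("tampering: %s" % tampering_detected(payload, payload))
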
diff --git a/data/nettests/manipulation/traceroute.py b/data/nettests/manipulation/traceroute.py
deleted file mode 100644
index 2db1826..0000000
--- a/data/nettests/manipulation/traceroute.py
+++ /dev/null
@@ -1,144 +0,0 @@
-# -*- encoding: utf-8 -*-
-#
-# :authors: Arturo Filastò
-# :licence: see LICENSE
-
-from twisted.python import usage
-from twisted.internet import defer
-
-from ooni.templates import scapyt
-
-from scapy.all import *
-
-from ooni.utils import log
-
-class UsageOptions(usage.Options):
-    optParameters = [
-                    ['backend', 'b', '8.8.8.8', 'Test backend to use'],
-                    ['timeout', 't', 5, 'The timeout for the traceroute test'],
-                    ['maxttl', 'm', 30, 'The maximum value of ttl to set on packets'],
-                    ['srcport', 'p', None, 'Set the source port to a specific value (only applies to TCP and UDP)']
-                    ]
-
-class TracerouteTest(scapyt.BaseScapyTest):
-    name = "Multi Protocol Traceroute Test"
-    author = "Arturo Filastò"
-    version = "0.2"
-
-    requiredTestHelpers = {'backend': 'traceroute'}
-    usageOptions = UsageOptions
-    dst_ports = [0, 22, 23, 53, 80, 123, 443, 8080, 65535]
-
-    def setUp(self):
-        def get_sport(protocol):
-            if self.localOptions['srcport']:
-                return int(self.localOptions['srcport'])
-            else:
-                return random.randint(1024, 65535)
-
-        self.get_sport = get_sport
-
-    def max_ttl_and_timeout(self):
-        max_ttl = int(self.localOptions['maxttl'])
-        timeout = int(self.localOptions['timeout'])
-        self.report['max_ttl'] = max_ttl
-        self.report['timeout'] = timeout
-        return max_ttl, timeout
-
-
-    def postProcessor(self, report):
-        tcp_hops = report['test_tcp_traceroute']
-        udp_hops = report['test_udp_traceroute']
-        icmp_hops = report['test_icmp_traceroute']
-
-
-    def test_tcp_traceroute(self):
-        """
-        Does a traceroute to the destination by sending TCP SYN packets
-        with TTLs from 1 until max_ttl.
-        """
-        def finished(packets, port):
-            log.debug("Finished running TCP traceroute test on port %s" % port)
-            answered, unanswered = packets
-            self.report['hops_'+str(port)] = []
-            for snd, rcv in answered:
-                try:
-                    sport = snd[TCP].sport
-                except IndexError:
-                    log.err("Source port for this traceroute was not found. This is probably a bug")
-                    sport = -1
-
-                report = {'ttl': snd.ttl,
-                        'address': rcv.src,
-                        'rtt': rcv.time - snd.time,
-                        'sport': sport
-                }
-                log.debug("%s: %s" % (port, report))
-                self.report['hops_'+str(port)].append(report)
-
-        dl = []
-        max_ttl, timeout = self.max_ttl_and_timeout()
-        for port in self.dst_ports:
-            packets = IP(dst=self.localOptions['backend'],
-                    ttl=(1,max_ttl),id=RandShort())/TCP(flags=0x2, dport=port,
-                            sport=self.get_sport('tcp'))
-
-            d = self.sr(packets, timeout=timeout)
-            d.addCallback(finished, port)
-            dl.append(d)
-        return defer.DeferredList(dl)
-
-    def test_udp_traceroute(self):
-        """
-        Does a traceroute to the destination by sending UDP packets with empty
-        payloads with TTLs from 1 until max_ttl.
-        """
-        def finished(packets, port):
-            log.debug("Finished running UDP traceroute test on port %s" % port)
-            answered, unanswered = packets
-            self.report['hops_'+str(port)] = []
-            for snd, rcv in answered:
-                report = {'ttl': snd.ttl,
-                        'address': rcv.src,
-                        'rtt': rcv.time - snd.time,
-                        'sport': snd[UDP].sport
-                }
-                log.debug("%s: %s" % (port, report))
-                self.report['hops_'+str(port)].append(report)
-        dl = []
-        max_ttl, timeout = self.max_ttl_and_timeout()
-        for port in self.dst_ports:
-            packets = IP(dst=self.localOptions['backend'],
-                    ttl=(1,max_ttl),id=RandShort())/UDP(dport=port,
-                            sport=self.get_sport('udp'))
-
-            d = self.sr(packets, timeout=timeout)
-            d.addCallback(finished, port)
-            dl.append(d)
-        return defer.DeferredList(dl)
-
-    def test_icmp_traceroute(self):
-        """
-        Does a traceroute to the destination by sending ICMP echo request
-        packets with TTLs from 1 until max_ttl.
-        """
-        def finished(packets):
-            log.debug("Finished running ICMP traceroute test")
-            answered, unanswered = packets
-            self.report['hops'] = []
-            for snd, rcv in answered:
-                report = {'ttl': snd.ttl,
-                        'address': rcv.src,
-                        'rtt': rcv.time - snd.time
-                }
-                log.debug("%s" % (report))
-                self.report['hops'].append(report)
-        dl = []
-        max_ttl, timeout = self.max_ttl_and_timeout()
-        packets = IP(dst=self.localOptions['backend'],
-                    ttl=(1,max_ttl), id=RandShort())/ICMP()
-
-        d = self.sr(packets, timeout=timeout)
-        d.addCallback(finished)
-        return d
-
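
For reference, the TTL sweep performed by the traceroute tests above can be reproduced with plain scapy outside of txscapy. This is only an illustrative sketch under placeholder assumptions (destination, port and timeout are made up, not values from the patch):

    # Rough blocking equivalent of test_tcp_traceroute's TTL sweep.
    from scapy.all import IP, TCP, RandShort, sr

    def tcp_traceroute(dst, dport=80, max_ttl=30, timeout=5):
        # One SYN per TTL from 1 to max_ttl: intermediate hops answer with
        # ICMP time-exceeded, the destination with SYN/ACK or RST.
        packets = IP(dst=dst, ttl=(1, max_ttl), id=RandShort()) / \
                  TCP(flags="S", dport=dport, sport=RandShort())
        answered, unanswered = sr(packets, timeout=timeout)
        hops = [{'ttl': snd.ttl, 'address': rcv.src,
                 'rtt': rcv.time - snd.sent_time}
                for snd, rcv in answered]
        return sorted(hops, key=lambda hop: hop['ttl'])
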
diff --git a/data/nettests/scanning/__init__.py b/data/nettests/scanning/__init__.py
deleted file mode 100644
index e69de29..0000000
diff --git a/data/nettests/scanning/http_url_list.py b/data/nettests/scanning/http_url_list.py
deleted file mode 100644
index 0accaae..0000000
--- a/data/nettests/scanning/http_url_list.py
+++ /dev/null
@@ -1,98 +0,0 @@
-# -*- encoding: utf-8 -*-
-#
-# :authors: Arturo Filastò
-# :licence: see LICENSE
-
-from twisted.internet import defer
-from twisted.python import usage
-from ooni.templates import httpt
-from ooni.utils import log
-
-class UsageOptions(usage.Options):
-    optParameters = [['content', 'c', None,
-                        'The file to read from containing the content of a block page'],
-                     ['url', 'u', None, 'Specify a single URL to test.']
-                    ]
-
-class HTTPURLList(httpt.HTTPTest):
-    """
-    Performs GET, POST and PUT requests to a list of URLs specified as
-    input and checks if the page that we get back as a result matches that
-    of a block page given as input.
-
-    If no block page is given as input to the test it will simply collect the
-    responses to the HTTP requests and write them to a report file.
-    """
-    name = "HTTP URL List"
-    author = "Arturo Filastò"
-    version = "0.1.3"
-
-    usageOptions = UsageOptions
-
-    inputFile = ['file', 'f', None, 
-            'List of URLS to perform GET and POST requests to']
-
-    def setUp(self):
-        """
-        Check for inputs.
-        """
-        if self.input:
-            self.url = self.input
-        elif self.localOptions['url']:
-            self.url = self.localOptions['url']
-        else:
-            raise Exception("No input specified")
-
-    def check_for_content_censorship(self, body):
-        """
-        If we have specified what a censorship page looks like here we will
-        check if the page we are looking at matches it.
-
-        XXX this is not tested, though it is basically what was used to detect
-        censorship in the palestine case.
-        """
-        self.report['censored'] = True
-
-        censorship_page = open(self.localOptions['content']).xreadlines()
-        response_page = iter(body.split("\n"))
-
-        # We first allign the two pages to the first HTML tag (something
-        # starting with <). This is useful so that we can give as input to this
-        # test something that comes from the output of curl -kis
-        # http://the_page/
-        for line in censorship_page:
-            if line.strip().startswith("<"):
-                break
-        for line in response_page:
-            if line.strip().startswith("<"):
-                break
-
-        for censorship_line in censorship_page:
-            try:
-                response_line = response_page.next()
-            except StopIteration:
-                # The censored page and the response we got do not match in
-                # length.
-                self.report['censored'] = False
-                break
-            censorship_line = censorship_line.replace("\n", "")
-            if response_line != censorship_line:
-                self.report['censored'] = False
-
-        censorship_page.close()
-
-    def processResponseBody(self, body):
-        if self.localOptions['content']:
-            log.msg("Checking for censorship in response body")
-            self.check_for_content_censorship(body)
-
-    def test_get(self):
-        return self.doRequest(self.url, method="GET")
-
-    def test_post(self):
-        return self.doRequest(self.url, method="POST")
-
-    def test_put(self):
-        return self.doRequest(self.url, method="PUT")
-
-
diff --git a/data/nettests/third_party/Makefile b/data/nettests/third_party/Makefile
deleted file mode 100644
index 16adfe0..0000000
--- a/data/nettests/third_party/Makefile
+++ /dev/null
@@ -1,3 +0,0 @@
-fetch:
-	wget http://netalyzr.icsi.berkeley.edu/NetalyzrCLI.jar
-	chmod +x NetalyzrCLI.jar
diff --git a/data/nettests/third_party/README b/data/nettests/third_party/README
deleted file mode 100644
index d9e435f..0000000
--- a/data/nettests/third_party/README
+++ /dev/null
@@ -1,14 +0,0 @@
-There is no license for NetalyzrCLI.jar; so while we include it, it's just
-for ease of use.
-
-We currently support interfacing with the ICSI Netalyzr system by wrapping
-the NetalyzrCLI.jar client. It was downloaded on August 5th, 2011 from the
-following URL:
-  http://netalyzr.icsi.berkeley.edu/NetalyzrCLI.jar
-
-More information about the client is available on the cli web page:
-  http://netalyzr.icsi.berkeley.edu/cli.html
-
-After looking at NetalyzrCLI.jar, I discovered that '-d' runs it in a
-debugging mode that is quite useful for understanding their testing
-framework as it runs.
diff --git a/data/nettests/third_party/__init__.py b/data/nettests/third_party/__init__.py
deleted file mode 100644
index e69de29..0000000
diff --git a/data/nettests/third_party/netalyzr.py b/data/nettests/third_party/netalyzr.py
deleted file mode 100644
index 9b21831..0000000
--- a/data/nettests/third_party/netalyzr.py
+++ /dev/null
@@ -1,58 +0,0 @@
-# -*- encoding: utf-8 -*-
-#
-# This is a wrapper around the Netalyzer Java command line client
-#
-# :authors: Jacob Appelbaum <jacob@xxxxxxxxxxxxx>
-#           Arturo "hellais" Filastò <art@xxxxxxxxx>
-# :licence: see LICENSE
-
-from ooni import nettest
-from ooni.utils import log
-import time
-import os
-from twisted.internet import reactor, threads, defer
-
-class NetalyzrWrapperTest(nettest.NetTestCase):
-    name = "NetalyzrWrapper"
-
-    def setUp(self):
-        cwd = os.path.abspath(os.path.join(os.path.abspath(__file__), '..'))
-
-        # XXX set the output directory to something more uniform
-        outputdir = os.path.join(cwd, '..', '..')
-
-        program_path = os.path.join(cwd, 'NetalyzrCLI.jar')
-        program = "java -jar %s -d" % program_path
-
-        test_token = time.asctime(time.gmtime()).replace(" ", "_").strip()
-
-        self.output_file = os.path.join(outputdir,
-                "NetalyzrCLI_" + test_token + ".out")
-        self.output_file.strip()
-        self.run_me = program + " 2>&1 >> " + self.output_file
-
-    def blocking_call(self):
-        try:
-            result = threads.blockingCallFromThread(reactor, os.system, self.run_me) 
-        except:
-            log.debug("Netalyzr had an error, please see the log file: %s" % self.output_file)
-        finally:
-            self.clean_up()
-
-    def clean_up(self):
-        self.report['netalyzr_report'] = self.output_file
-        log.debug("finished running NetalzrWrapper")
-        log.debug("Please check %s for Netalyzr output" % self.output_file)
-
-    def test_run_netalyzr(self):
-        """
-        This test simply wraps netalyzr and runs it from command line
-        """
-        log.msg("Running NetalyzrWrapper (this will take some time, be patient)")
-        log.debug("with command '%s'" % self.run_me)
-        # XXX we probably want to use a processprotocol here to obtain the
-        # stdout from Netalyzr. This would allows us to visualize progress
-        # (currently there is no progress because the stdout of os.system is
-        # trapped by twisted) and to include the link to the netalyzr report
-        # directly in the OONI report, perhaps even downloading it.
-        reactor.callInThread(self.blocking_call)
diff --git a/ooni/nettests/__init__.py b/ooni/nettests/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/ooni/nettests/blocking/__init__.py b/ooni/nettests/blocking/__init__.py
new file mode 100644
index 0000000..8b13789
--- /dev/null
+++ b/ooni/nettests/blocking/__init__.py
@@ -0,0 +1 @@
+
diff --git a/ooni/nettests/blocking/dnsconsistency.py b/ooni/nettests/blocking/dnsconsistency.py
new file mode 100644
index 0000000..3c88cd2
--- /dev/null
+++ b/ooni/nettests/blocking/dnsconsistency.py
@@ -0,0 +1,175 @@
+# -*- encoding: utf-8 -*-
+#
+#  dnsconsistency
+#  **************
+#
+#  The test reports censorship if the cardinality of the intersection of
+#  the query result set from the control server and the query result set
+#  from the experimental server is zero, which is to say, if the two sets
+#  have no matching results whatsoever.
+#
+#  NOTE: This test frequently results in false positives due to GeoIP-based
+#  load balancing on major global sites such as Google, Facebook, and
+#  YouTube.
+#
+# :authors: Arturo Filastò, Isis Lovecruft
+# :licence: see LICENSE
+
+import pdb
+
+from twisted.python import usage
+from twisted.internet import defer
+
+from ooni.templates import dnst
+
+from ooni import nettest
+from ooni.utils import log
+
+class UsageOptions(usage.Options):
+    optParameters = [['backend', 'b', '8.8.8.8:53',
+                        'The OONI backend that runs the DNS resolver'],
+                     ['testresolvers', 'T', None,
+                        'File containing list of DNS resolvers to test against'],
+                     ['testresolver', 't', None,
+                         'Specify a single test resolver to use for testing']
+                    ]
+
+class DNSConsistencyTest(dnst.DNSTest):
+
+    name = "DNS Consistency"
+    description = "DNS censorship detection test"
+    version = "0.6"
+    authors = "Arturo Filastò, Isis Lovecruft"
+    requirements = None
+
+    inputFile = ['file', 'f', None,
+                 'Input file of list of hostnames to attempt to resolve']
+    
+    requiredTestHelpers = {'backend': 'dns'}
+
+    usageOptions = UsageOptions
+    requiredOptions = ['backend', 'file']
+
+    def setUp(self):
+        if (not self.localOptions['testresolvers'] and \
+                not self.localOptions['testresolver']):
+            raise usage.UsageError("You did not specify a testresolver")
+
+        elif self.localOptions['testresolvers']:
+            test_resolvers_file = self.localOptions['testresolvers']
+
+        elif self.localOptions['testresolver']:
+            self.test_resolvers = [self.localOptions['testresolver']]
+
+        try:
+            with open(test_resolvers_file) as f:
+                self.test_resolvers = [x.split('#')[0].strip() for x in f.readlines()]
+                self.report['test_resolvers'] = self.test_resolvers
+
+        except IOError, e:
+            log.exception(e)
+            raise usage.UsageError("Invalid test resolvers file")
+
+        except NameError:
+            log.debug("No test resolver file configured")
+
+        dns_ip, dns_port = self.localOptions['backend'].split(':')
+        self.control_dns_server = (dns_ip, int(dns_port))
+
+        self.report['control_resolver'] = self.control_dns_server
+
+    @defer.inlineCallbacks
+    def test_a_lookup(self):
+        """
+        We perform an A lookup on the DNS test servers for the domains to be
+        tested and an A lookup on the known good DNS server.
+
+        We then compare the results from the test_resolvers with those from
+        the control_resolver and see if they match up.
+        If they match up then no censorship is happening (tampering: false).
+
+        If they do not, we do a reverse lookup (PTR) on the test_resolvers and
+        the control resolver for every IP address we got back and check to see
+        if any of them match the control ones.
+
+        If they do, then we take note of the fact that censorship is probably
+        not happening (tampering: reverse-match).
+
+        If they do not match then censorship is probably going on (tampering:
+        true).
+        """
+        log.msg("Doing the test lookups on %s" % self.input)
+        list_of_ds = []
+        hostname = self.input
+
+        self.report['tampering'] = {}
+
+        control_answers = yield self.performALookup(hostname, self.control_dns_server)
+        if not control_answers:
+                log.err("Got no response from control DNS server %s," \
+                        " perhaps the DNS resolver is down?" % self.control_dns_server[0])
+                self.report['tampering'][self.control_dns_server] = 'no_answer'
+                return
+
+        for test_resolver in self.test_resolvers:
+            log.msg("Testing resolver: %s" % test_resolver)
+            test_dns_server = (test_resolver, 53)
+
+            try:
+                experiment_answers = yield self.performALookup(hostname, test_dns_server)
+            except Exception, e:
+                log.err("Problem performing the DNS lookup")
+                log.exception(e)
+                self.report['tampering'][test_resolver] = 'dns_lookup_error'
+                continue
+
+            if not experiment_answers:
+                log.err("Got no response, perhaps the DNS resolver is down?")
+                self.report['tampering'][test_resolver] = 'no_answer'
+                continue
+            else:
+                log.debug("Got the following A lookup answers %s from %s" % (experiment_answers, test_resolver))
+
+            def lookup_details():
+                """
+                A closure useful for printing test details.
+                """
+                log.msg("test resolver: %s" % test_resolver)
+                log.msg("experiment answers: %s" % experiment_answers)
+                log.msg("control answers: %s" % control_answers)
+
+            log.debug("Comparing %s with %s" % (experiment_answers, control_answers))
+            if set(experiment_answers) & set(control_answers):
+                lookup_details()
+                log.msg("tampering: false")
+                self.report['tampering'][test_resolver] = False
+            else:
+                log.msg("Trying to do reverse lookup")
+
+                experiment_reverse = yield self.performPTRLookup(experiment_answers[0], test_dns_server)
+                control_reverse = yield self.performPTRLookup(control_answers[0], self.control_dns_server)
+
+                if experiment_reverse == control_reverse:
+                    log.msg("Further testing has eliminated false positives")
+                    lookup_details()
+                    log.msg("tampering: reverse_match")
+                    self.report['tampering'][test_resolver] = 'reverse_match'
+                else:
+                    log.msg("Reverse lookups do not match")
+                    lookup_details()
+                    log.msg("tampering: true")
+                    self.report['tampering'][test_resolver] = True
+
+    def inputProcessor(self, filename=None):
+        """
+        This inputProcessor extracts domain names from urls
+        """
+        log.debug("Running dnsconsistency default processor")
+        if filename:
+            fp = open(filename)
+            for x in fp.readlines():
+                yield x.strip().split('//')[-1].split('/')[0]
+            fp.close()
+        else:
+            pass
diff --git a/ooni/nettests/blocking/http_requests.py b/ooni/nettests/blocking/http_requests.py
new file mode 100644
index 0000000..8c74762
--- /dev/null
+++ b/ooni/nettests/blocking/http_requests.py
@@ -0,0 +1,129 @@
+# -*- encoding: utf-8 -*-
+#
+# :authors: Arturo Filastò
+# :licence: see LICENSE
+
+import random
+from twisted.internet import defer
+from twisted.python import usage
+
+from ooni.utils import log
+from ooni.utils.net import userAgents
+from ooni.templates import httpt
+from ooni.errors import failureToString, handleAllFailures
+
+class UsageOptions(usage.Options):
+    optParameters = [
+                     ['url', 'u', None, 'Specify a single URL to test.'],
+                     ['factor', 'f', 0.8, 'What factor should be used for triggering censorship (0.8 == 80%)']
+                    ]
+
+class HTTPRequestsTest(httpt.HTTPTest):
+    """
+    Performs two GET requests to the set of sites to be tested for
+    censorship: one over a known good control channel (Tor), the other over the
+    test network.
+
+    We check to see if the response headers match and if the response body
+    lengths match.
+    """
+    name = "HTTP Requests Test"
+    author = "Arturo Filastò"
+    version = "0.2.3"
+
+    usageOptions = UsageOptions
+
+    inputFile = ['file', 'f', None,
+            'List of URLS to perform GET and POST requests to']
+
+    # These values are used for determining censorship based on response body
+    # lengths
+    control_body_length = None
+    experiment_body_length = None
+
+    def setUp(self):
+        """
+        Check for inputs.
+        """
+        if self.input:
+            self.url = self.input
+        elif self.localOptions['url']:
+            self.url = self.localOptions['url']
+        else:
+            raise Exception("No input specified")
+
+        self.factor = self.localOptions['factor']
+        self.report['control_failure'] = None
+        self.report['experiment_failure'] = None
+
+    def compare_body_lengths(self, body_length_a, body_length_b):
+
+        if body_length_b == 0 and body_length_a != 0:
+            rel = float(body_length_b)/float(body_length_a)
+        elif body_length_b == 0 and body_length_a == 0:
+            rel = float(1)
+        else:
+            rel = float(body_length_a)/float(body_length_b)
+
+        if rel > 1:
+            rel = 1/rel
+
+        self.report['body_proportion'] = rel
+        self.report['factor'] = float(self.factor)
+        if rel > float(self.factor):
+            log.msg("The two body lengths appear to match")
+            log.msg("censorship is probably not happening")
+            self.report['body_length_match'] = True
+        else:
+            log.msg("The two body lengths appear to not match")
+            log.msg("censorship could be happening")
+            self.report['body_length_match'] = False
+
+    def compare_headers(self, headers_a, headers_b):
+        diff = headers_a.getDiff(headers_b)
+        if diff:
+            log.msg("Headers appear to *not* match")
+            self.report['headers_diff'] = diff
+            self.report['headers_match'] = False
+        else:
+            log.msg("Headers appear to match")
+            self.report['headers_diff'] = diff
+            self.report['headers_match'] = True
+
+    def test_get(self):
+        def callback(res):
+            experiment, control = res
+            experiment_succeeded, experiment_result = experiment
+            control_succeeded, control_result = control
+
+            if control_succeeded and experiment_succeeded:
+                self.compare_body_lengths(len(experiment_result.body),
+                        len(control_result.body))
+
+                self.compare_headers(control_result.headers,
+                        experiment_result.headers)
+
+            if not control_succeeded:
+                self.report['control_failure'] = failureToString(control_result)
+
+            if not experiment_succeeded:
+                self.report['experiment_failure'] = failureToString(experiment_result)
+
+        headers = {'User-Agent': [random.choice(userAgents)]}
+
+        l = []
+        log.msg("Performing GET request to %s" % self.url)
+        experiment_request = self.doRequest(self.url, method="GET",
+                headers=headers)
+
+        control_request = self.doRequest(self.url, method="GET",
+                use_tor=True, headers=headers)
+
+        l.append(experiment_request)
+        l.append(control_request)
+
+        dl = defer.DeferredList(l, consumeErrors=True)
+        dl.addCallback(callback)
+
+        return dl
+
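
The compare_body_lengths heuristic above amounts to a single ratio check against the configurable factor (0.8 by default). A stand-alone sketch of the same rule, not the test's API:

    # Sketch of the body-length heuristic: the smaller body divided by the
    # larger one must exceed `factor` for the responses to count as a match.
    def bodies_match(len_a, len_b, factor=0.8):
        if len_a == 0 and len_b == 0:
            return True
        if min(len_a, len_b) == 0:
            return False
        rel = float(min(len_a, len_b)) / float(max(len_a, len_b))
        return rel > factor

    # e.g. bodies_match(10000, 9500) -> True, bodies_match(10000, 300) -> False
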
diff --git a/ooni/nettests/blocking/tcpconnect.py b/ooni/nettests/blocking/tcpconnect.py
new file mode 100644
index 0000000..5b432e0
--- /dev/null
+++ b/ooni/nettests/blocking/tcpconnect.py
@@ -0,0 +1,69 @@
+# -*- encoding: utf-8 -*-
+from twisted.internet.protocol import Factory, Protocol
+from twisted.internet.endpoints import TCP4ClientEndpoint
+
+from twisted.internet.error import ConnectionRefusedError
+from twisted.internet.error import TCPTimedOutError, TimeoutError
+
+from ooni import nettest
+from ooni.errors import handleAllFailures
+from ooni.utils import log
+
+class TCPFactory(Factory):
+    def buildProtocol(self, addr):
+        return Protocol()
+
+class TCPConnectTest(nettest.NetTestCase):
+    name = "TCP Connect"
+    author = "Arturo Filastò"
+    version = "0.1"
+    inputFile = ['file', 'f', None,
+            'File containing the IP:PORT combinations to be tested, one per line']
+
+    requiredOptions = ['file']
+    def test_connect(self):
+        """
+        This test performs a TCP connection to the remote host on the specified port.
+        The report will contain the string 'success' if the test has
+        succeeded, or the reason for the failure if it has failed.
+        """
+        host, port = self.input.split(":")
+        def connectionSuccess(protocol):
+            protocol.transport.loseConnection()
+            log.debug("Got a connection to %s" % self.input)
+            self.report["connection"] = 'success'
+
+        def connectionFailed(failure):
+            self.report['connection'] = handleAllFailures(failure)
+
+        from twisted.internet import reactor
+        point = TCP4ClientEndpoint(reactor, host, int(port))
+        d = point.connect(TCPFactory())
+        d.addCallback(connectionSuccess)
+        d.addErrback(connectionFailed)
+        return d
+
+    def inputProcessor(self, filename=None):
+        """
+        This inputProcessor extracts name:port pairs from urls
+        XXX: Does not support unusual port numbers
+        """
+        def strip_url(address):
+            proto, path = address.strip().split('://')
+            proto = proto.lower()
+            host = path.split('/')[0]
+            if proto == 'http':
+                return "%s:80" % host
+            if proto == 'https':
+                return "%s:443" % host
+
+        if filename:
+            fp = open(filename)
+            for x in fp.readlines():
+                if x.startswith("http"):
+                    yield strip_url(x)
+                else:
+                    yield x.strip()
+            fp.close()
+        else:
+            pass
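
For illustration, the URL-to-endpoint mapping performed by inputProcessor above looks roughly like this as a stand-alone function (hostnames and addresses are placeholders):

    # Sketch of the URL stripping done by inputProcessor: only http and
    # https are mapped to ports, anything else is passed through unchanged.
    def to_endpoint(line):
        line = line.strip()
        if not line.startswith("http"):
            return line                    # already an IP:PORT pair
        proto, path = line.split('://', 1)
        host = path.split('/')[0]
        return "%s:%s" % (host, "443" if proto.lower() == "https" else "80")

    # e.g. to_endpoint("https://example.com/a") -> "example.com:443"
    #      to_endpoint("1.2.3.4:9001")          -> "1.2.3.4:9001"
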
diff --git a/ooni/nettests/experimental/__init__.py b/ooni/nettests/experimental/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/ooni/nettests/experimental/bridge_reachability/bridget.py b/ooni/nettests/experimental/bridge_reachability/bridget.py
new file mode 100644
index 0000000..acf3dff
--- /dev/null
+++ b/ooni/nettests/experimental/bridge_reachability/bridget.py
@@ -0,0 +1,462 @@
+#!/usr/bin/env python
+# -*- encoding: utf-8 -*-
+#
+#  +-----------+
+#  |  BRIDGET  |
+#  |        +--------------------------------------------+
+#  +--------| Use a Tor process to test making a Tor     |
+#           | connection to a list of bridges or relays. |
+#           +--------------------------------------------+
+#
+# :authors: Isis Lovecruft, Arturo Filasto
+# :licence: see included LICENSE
+# :version: 0.1.0-alpha
+
+from __future__           import with_statement
+from functools            import partial
+from random               import randint
+
+import os
+import sys
+
+from twisted.python       import usage
+from twisted.internet     import defer, error, reactor
+
+from ooni                 import nettest
+
+from ooni.utils           import log, date
+from ooni.utils.config    import ValueChecker
+
+from ooni.utils.onion     import TxtorconImportError
+from ooni.utils.onion     import PTNoBridgesException, PTNotFoundException
+
+
+try:
+    from ooni.utils.onion     import parse_data_dir
+except:
+    log.msg("Please go to /ooni/lib and do 'make txtorcon' to run this test!")
+
+class MissingAssetException(Exception):
+    pass
+
+class RandomPortException(Exception):
+    """Raised when using a random port conflicts with configured ports."""
+    def __init__(self):
+        log.msg("Unable to use random and specific ports simultaneously")
+        return sys.exit()
+
+class BridgetArgs(usage.Options):
+    """Commandline options."""
+    allowed = "Port to use for Tor's %s, must be between 1024 and 65535."
+    sock_check = ValueChecker(allowed % "SocksPort").port_check
+    ctrl_check = ValueChecker(allowed % "ControlPort").port_check
+
+    optParameters = [
+        ['bridges', 'b', None,
+         'File listing bridge IP:ORPorts to test'],
+        ['relays', 'f', None,
+         'File listing relay IPs to test'],
+        ['socks', 's', 9049, None, sock_check],
+        ['control', 'c', 9052, None, ctrl_check],
+        ['torpath', 'p', None,
+         'Path to the Tor binary to use'],
+        ['datadir', 'd', None,
+         'Tor DataDirectory to use'],
+        ['transport', 't', None,
+         'Tor ClientTransportPlugin'],
+        ['resume', 'r', 0,
+         'Resume at this index']]
+    optFlags = [['random', 'x', 'Use random ControlPort and SocksPort']]
+
+    def postOptions(self):
+        if not self['bridges'] and not self['relays']:
+            raise MissingAssetException(
+                "Bridget can't run without bridges or relays to test!")
+        if self['transport']:
+            ValueChecker.uid_check(
+                "Can't run bridget as root with pluggable transports!")
+            if not self['bridges']:
+                raise PTNoBridgesException
+        if self['socks'] or self['control']:
+            if self['random']:
+                raise RandomPortException
+        if self['datadir']:
+            ValueChecker.dir_check(self['datadir'])
+        if self['torpath']:
+            ValueChecker.file_check(self['torpath'])
+
+class BridgetTest(nettest.NetTestCase):
+    """
+    XXX fill me in
+
+    :ivar config:
+        An :class:`ooni.lib.txtorcon.TorConfig` instance.
+    :ivar relays:
+        A list of all provided relays to test.
+    :ivar bridges:
+        A list of all provided bridges to test.
+    :ivar socks_port:
+        Integer for Tor's SocksPort.
+    :ivar control_port:
+        Integer for Tor's ControlPort.
+    :ivar transport:
+        String defining the Tor's ClientTransportPlugin, for testing
+        a bridge's pluggable transport functionality.
+    :ivar tor_binary:
+        Path to the Tor binary to use, e.g. \'/usr/sbin/tor\'
+    """
+    name    = "bridget"
+    author  = "Isis Lovecruft <isis@xxxxxxxxxxxxxx>"
+    version = "0.1"
+    description   = "Use a Tor process to test connecting to bridges or relays"
+    usageOptions = BridgetArgs
+
+    def setUp(self):
+        """
+        Extra initialization steps. We only want one child Tor process
+        running, so we need to deal with most of the TorConfig() only once,
+        before the experiment runs.
+        """
+        self.socks_port      = 9049
+        self.control_port    = 9052
+        self.circuit_timeout = 90
+        self.tor_binary      = '/usr/sbin/tor'
+        self.data_directory  = None
+
+        def read_from_file(filename):
+            log.msg("Loading information from %s ..." % opt)
+            with open(filename) as fp:
+                lst = []
+                for line in fp.readlines():
+                    if line.startswith('#'):
+                        continue
+                    else:
+                        lst.append(line.replace('\n',''))
+                return lst
+
+        def __count_remaining__(which):
+            total, reach, unreach = map(lambda x: which[x],
+                                        ['all', 'reachable', 'unreachable'])
+            count = len(total) - reach() - unreach()
+            return count
+
+        ## XXX should we do report['bridges_up'].append(self.bridges['current'])
+        self.bridges = {}
+        self.bridges['all'], self.bridges['up'], self.bridges['down'] = \
+            ([] for i in range(3))
+        self.bridges['reachable']   = lambda: len(self.bridges['up'])
+        self.bridges['unreachable'] = lambda: len(self.bridges['down'])
+        self.bridges['remaining']   = lambda: __count_remaining__(self.bridges)
+        self.bridges['current']     = None
+        self.bridges['pt_type']     = None
+        self.bridges['use_pt']      = False
+
+        self.relays = {}
+        self.relays['all'], self.relays['up'], self.relays['down'] = \
+            ([] for i in range(3))
+        self.relays['reachable']   = lambda: len(self.relays['up'])
+        self.relays['unreachable'] = lambda: len(self.relays['down'])
+        self.relays['remaining']   = lambda: __count_remaining__(self.relays)
+        self.relays['current']     = None
+
+        if self.localOptions:
+            try:
+                from txtorcon import TorConfig
+            except ImportError:
+                raise TxtorconImportError
+            else:
+                self.config = TorConfig()
+            finally:
+                options = self.localOptions
+
+            if options['bridges']:
+                self.config.UseBridges = 1
+                self.bridges['all'] = read_from_file(options['bridges'])
+            if options['relays']:
+                ## first hop must be in TorState().guards
+                self.relays['all'] = read_from_file(options['relays'])
+                self.config.EntryNodes = ','.join(self.relays['all'])
+            if options['socks']:
+                self.socks_port = options['socks']
+            if options['control']:
+                self.control_port = options['control']
+            if options['random']:
+                log.msg("Using randomized ControlPort and SocksPort ...")
+                self.socks_port   = randint(1024, 65535)
+                self.control_port = randint(1024, 65535)
+            if options['torpath']:
+                self.tor_binary = options['torpath']
+            if options['datadir']:
+                self.data_directory = parse_data_dir(options['datadir'])
+            if options['transport']:
+                ## ClientTransportPlugin transport exec pathtobinary [options]
+                ## XXX we need a better way to deal with all PTs
+                log.msg("Using ClientTransportPlugin %s" % options['transport'])
+                self.bridges['use_pt'] = True
+                [self.bridges['pt_type'], pt_exec] = \
+                    options['transport'].split(' ', 1)
+
+                if self.bridges['pt_type'] == "obfs2":
+                    self.config.ClientTransportPlugin = \
+                        self.bridges['pt_type'] + " " + pt_exec
+                else:
+                    raise PTNotFoundException
+
+            self.config.SocksPort            = self.socks_port
+            self.config.ControlPort          = self.control_port
+            self.config.CookieAuthentication = 1
+
+    def test_bridget(self):
+        """
+        if bridges:
+            1. configure first bridge line
+            2a. configure data_dir, if it doesn't exist
+            2b. write torrc to a tempfile in data_dir
+            3. start tor                              } if any of these
+            4. remove bridges which are public relays } fail, add current
+            5. SIGHUP for each bridge                 } bridge to unreach-
+                                                      } able bridges.
+        if relays:
+            1a. configure the data_dir, if it doesn't exist
+            1b. write torrc to a tempfile in data_dir
+            2. start tor
+            3. remove any of our relays which are already part of current
+               circuits
+            4a. attach CustomCircuit() to self.state
+            4b. RELAY_EXTEND for each relay } if this fails, add
+                                            } current relay to list
+                                            } of unreachable relays
+            5.
+        if bridges and relays:
+            1. configure first bridge line
+            2a. configure data_dir if it doesn't exist
+            2b. write torrc to a tempfile in data_dir
+            3. start tor
+            4. remove bridges which are public relays
+            5. remove any of our relays which are already part of current
+               circuits
+            6a. attach CustomCircuit() to self.state
+            6b. for each bridge, build three circuits, with three
+                relays each
+            6c. RELAY_EXTEND for each relay } if this fails, add
+                                            } current relay to list
+                                            } of unreachable relays
+
+        :param args:
+            The :class:`BridgetAsset` line currently being used. Except that
+            in Bridget it isn't, so it should be ignored and avoided.
+        """
+        try:
+            from ooni.utils         import process
+            from ooni.utils.onion   import remove_public_relays, start_tor
+            from ooni.utils.onion   import start_tor_filter_nodes
+            from ooni.utils.onion   import setup_fail, setup_done
+            from ooni.utils.onion   import CustomCircuit
+            from ooni.utils.timer   import deferred_timeout, TimeoutError
+            from ooni.lib.txtorcon  import TorConfig, TorState
+        except ImportError:
+            raise TxtorconImportError
+        except TxtorconImportError, tie:
+            log.err(tie)
+            sys.exit()
+
+        def reconfigure_done(state, bridges):
+            """
+            Append :ivar:`bridges['current']` to the list
+            :ivar:`bridges['up']`.
+            """
+            log.msg("Reconfiguring with 'Bridge %s' successful"
+                    % bridges['current'])
+            bridges['up'].append(bridges['current'])
+            return state
+
+        def reconfigure_fail(state, bridges):
+            """
+            Append :ivar:`bridges['current']` to the list
+            :ivar:`bridges['down']`.
+            """
+            log.msg("Reconfiguring TorConfig with parameters %s failed"
+                    % state)
+            bridges['down'].append(bridges['current'])
+            return state
+
+        @defer.inlineCallbacks
+        def reconfigure_bridge(state, bridges):
+            """
+            Rewrite the Bridge line in our torrc. If use of pluggable
+            transports was specified, rewrite the line as:
+                Bridge <transport_type> <IP>:<ORPort>
+            Otherwise, rewrite in the standard form:
+                Bridge <IP>:<ORPort>
+
+            :param state:
+                A fully bootstrapped instance of
+                :class:`ooni.lib.txtorcon.TorState`.
+            :param bridges:
+                A dictionary of bridges containing the following keys:
+
+                bridges['remaining'] :: A function returning and int for the
+                                        number of remaining bridges to test.
+                bridges['current']   :: A string containing the <IP>:<ORPort>
+                                        of the current bridge.
+                bridges['use_pt']    :: A boolean, True if we're testing
+                                        bridges with a pluggable transport;
+                                        False otherwise.
+                bridges['pt_type']   :: If :ivar:`bridges['use_pt']` is True,
+                                        this is a string containing the type
+                                        of pluggable transport to test.
+            :return:
+                :param:`state`
+            """
+            log.msg("Current Bridge: %s" % bridges['current'])
+            log.msg("We now have %d bridges remaining to test..."
+                    % bridges['remaining']())
+            try:
+                if bridges['use_pt'] is False:
+                    controller_response = yield state.protocol.set_conf(
+                        'Bridge', bridges['current'])
+                elif bridges['use_pt'] and bridges['pt_type'] is not None:
+                    controller_response = yield state.protocol.set_conf(
+                        'Bridge', bridges['pt_type'] +' '+ bridges['current'])
+                else:
+                    raise PTNotFoundException
+
+                if controller_response == 'OK':
+                    finish = yield reconfigure_done(state, bridges)
+                else:
+                    log.err("SETCONF for %s responded with error:\n %s"
+                            % (bridges['current'], controller_response))
+                    finish = yield reconfigure_fail(state, bridges)
+
+                defer.returnValue(finish)
+
+            except Exception, e:
+                log.err("Reconfiguring torrc with Bridge line %s failed:\n%s"
+                        % (bridges['current'], e))
+                defer.returnValue(None)
+
+        def attacher_extend_circuit(attacher, deferred, router):
+            ## XXX todo write me
+            ## state.attacher.extend_circuit
+            raise NotImplementedError
+            #attacher.extend_circuit
+
+        def state_attach(state, path):
+            log.msg("Setting up custom circuit builder...")
+            attacher = CustomCircuit(state)
+            state.set_attacher(attacher, reactor)
+            state.add_circuit_listener(attacher)
+            return state
+
+            ## OLD
+            #for circ in state.circuits.values():
+            #    for relay in circ.path:
+            #        try:
+            #            relay_list.remove(relay)
+            #        except KeyError:
+            #            continue
+            ## XXX how do we attach to circuits with bridges?
+            d = defer.Deferred()
+            attacher.request_circuit_build(d)
+            return d
+
+        def state_attach_fail(state):
+            log.err("Attaching custom circuit builder failed: %s" % state)
+
+        log.msg("Bridget: initiating test ... ")  ## Start the experiment
+
+        ## if we've at least one bridge, and our config has no 'Bridge' line
+        if self.bridges['remaining']() >= 1 \
+                and not 'Bridge' in self.config.config:
+
+            ## configure our first bridge line
+            self.bridges['current'] = self.bridges['all'][0]
+            self.config.Bridge = self.bridges['current']
+                                                  ## avoid starting several
+            self.config.save()                    ## processes
+            assert self.config.config.has_key('Bridge'), "No Bridge Line"
+
+            ## start tor and remove bridges which are public relays
+            from ooni.utils.onion import start_tor_filter_nodes
+            state = start_tor_filter_nodes(reactor, self.config,
+                                           self.control_port, self.tor_binary,
+                                           self.data_directory, self.bridges)
+            #controller = defer.Deferred()
+            #controller.addCallback(singleton_semaphore, tor)
+            #controller.addErrback(setup_fail)
+            #bootstrap = defer.gatherResults([controller, filter_bridges],
+            #                                consumeErrors=True)
+
+            if state is not None:
+                log.debug("state:\n%s" % state)
+                log.debug("Current callbacks on TorState():\n%s"
+                          % state.callbacks)
+
+        ## if we've got more bridges
+        if self.bridges['remaining']() >= 2:
+            #all = []
+            for bridge in self.bridges['all'][1:]:
+                self.bridges['current'] = bridge
+                #new = defer.Deferred()
+                #new.addCallback(reconfigure_bridge, state, self.bridges)
+                #all.append(new)
+            #check_remaining = defer.DeferredList(all, consumeErrors=True)
+            #state.chainDeferred(check_remaining)
+                state.addCallback(reconfigure_bridge, self.bridges)
+
+        if self.relays['remaining']() > 0:
+            while self.relays['remaining']() >= 3:
+                #path = list(self.relays.pop() for i in range(3))
+                #log.msg("Trying path %s" % '->'.join(map(lambda node:
+                #                                         node, path)))
+                self.relays['current'] = self.relays['all'].pop()
+                for circ in state.circuits.values():
+                    for node in circ.path:
+                        if node == self.relays['current']:
+                            self.relays['up'].append(self.relays['current'])
+                    if len(circ.path) < 3:
+                        try:
+                            ext = attacher_extend_circuit(state.attacher, circ,
+                                                          self.relays['current'])
+                            ext.addCallback(attacher_extend_circuit_done,
+                                            state.attacher, circ,
+                                            self.relays['current'])
+                        except Exception, e:
+                            log.err("Extend circuit failed: %s" % e)
+                    else:
+                        continue
+
+        #state.callback(all)
+        #self.reactor.run()
+        return state
+
+    def disabled_startTest(self, args):
+        """
+        Local override of :meth:`OONITest.startTest` to bypass calling
+        self.control.
+
+        :param args:
+            The current line of :class:`Asset`, not used but kept for
+            compatibility reasons.
+        :return:
+            A fired deferred which callbacks :meth:`experiment` and
+            :meth:`OONITest.finished`.
+        """
+        self.start_time = date.now()
+        self.d = self.experiment(args)
+        self.d.addErrback(log.err)
+        self.d.addCallbacks(self.finished, log.err)
+        return self.d
+
+## ISIS' NOTES
+## -----------
+## TODO:
+##       x  cleanup documentation
+##       x  add DataDirectory option
+##       x  check if bridges are public relays
+##       o  take bridge_desc file as input, also be able to give same
+##          format as output
+##       x  Add asynchronous timeout for deferred, so that we don't wait
+##          forever for bridges that don't work.
diff --git a/ooni/nettests/experimental/bridge_reachability/echo.py b/ooni/nettests/experimental/bridge_reachability/echo.py
new file mode 100644
index 0000000..d4033dd
--- /dev/null
+++ b/ooni/nettests/experimental/bridge_reachability/echo.py
@@ -0,0 +1,132 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+#
+#  +---------+
+#  | echo.py |
+#  +---------+
+#     A simple ICMP-8 ping test.
+#
+# @authors: Isis Lovecruft, <isis@xxxxxxxxxxxxxx>
+# @version: 0.0.2-pre-alpha
+# @license: copyright (c) 2012 Isis Lovecruft
+#           see attached LICENCE file
+#
+
+import os
+import sys
+
+from twisted.python   import usage
+from twisted.internet import reactor, defer
+from ooni             import nettest
+from ooni.utils       import log, net, Storage, txscapy
+
+try:
+    from scapy.all             import IP, ICMP
+    from scapy.all             import sr1
+    from ooni.lib              import txscapy
+    from ooni.lib.txscapy      import txsr, txsend
+    from ooni.templates.scapyt import BaseScapyTest
+except:
+    log.msg("This test requires scapy, see www.secdev.org/projects/scapy")
+
+class UsageOptions(usage.Options):
+    optParameters = [
+        ['dst', 'd', None, 'Host IP to ping'],
+        ['file', 'f', None, 'File of list of IPs to ping'],
+        ['interface', 'i', None, 'Network interface to use'],
+        ['count', 'c', 1, 'Number of packets to send', int],
+        ['size', 's', 56, 'Number of bytes to send in ICMP data field', int],
+        ['ttl', 'l', 25, 'Set the IP Time to Live', int],
+        ['timeout', 't', 2, 'Seconds until timeout if no response', int],
+        ['pcap', 'p', None, 'Save pcap to this file'],
+        ['receive', 'r', True, 'Receive response packets']]
+
+class EchoTest(nettest.NetTestCase):
+    """
+    xxx fill me in
+    """
+    name         = 'echo'
+    author       = 'Isis Lovecruft <isis@xxxxxxxxxxxxxx>'
+    description  = 'A simple ping test to see if a host is reachable.'
+    version      = '0.0.2'
+    requiresRoot = True
+
+    usageOptions    = UsageOptions
+    #requiredOptions = ['dst']
+
+    def setUp(self, *a, **kw):
+        self.destinations = {}
+
+        if self.localOptions:
+            for key, value in self.localOptions.items():
+                log.debug("setting self.%s = %s" % (key, value))
+                setattr(self, key, value)
+
+        self.timeout *= 1000            ## convert to milliseconds
+
+        if not self.interface:
+            try:
+                iface = txscapy.getDefaultIface()
+            except Exception, e:
+                log.msg("No network interface specified!")
+                log.err(e)
+            else:
+                log.msg("Using system default interface: %s" % iface)
+                self.interface = iface
+
+        if self.pcap:
+            try:
+                self.pcapfile = open(self.pcap, 'a+')
+            except:
+                log.msg("Unable to write to pcap file %s" % self.pcap)
+            else:
+                self.pcap = net.capturePacket(self.pcapfile)
+
+        if not self.dst:
+            if self.file:
+                self.dstProcessor(self.file)
+                for key, value in self.destinations.items():
+                    for label, data in value.items():
+                        if not 'ans' in data:
+                            self.dst = label
+        else:
+            self.addDest(self.dst)
+        log.debug("self.dst is now: %s" % self.dst)
+
+        log.debug("Initialization of %s test completed." % self.name)
+
+    def addDest(self, dest):
+        d = dest.strip()
+        self.destinations[d] = {'dst_ip': d}
+
+    def dstProcessor(self, inputfile):
+        from ipaddr import IPAddress
+
+        if os.path.isfile(inputfile):
+            with open(inputfile) as f:
+                for line in f.readlines():
+                    if line.startswith('#'):
+                        continue
+                    self.addDest(line)
+
+    def test_icmp(self):
+        def process_response(echo_reply, dest):
+            if echo_reply:
+                log.msg("Received echo reply from %s: %s" % (dest, echo_reply))
+            else:
+                log.msg("No reply was received from %s. Possible censorship event." % dest)
+            self.report[dest] = echo_reply
+
+        for label, data in self.destinations.items():
+            reply = sr1(IP(dst=label)/ICMP())
+            process_response(reply, label)
+
+        #(ans, unans) = ping
+        #self.destinations[self.dst].update({'ans': ans,
+        #                                    'unans': unans,
+        #                                    'response_packet': ping})
+        #return ping
+
+        #return reply
diff --git a/ooni/nettests/experimental/chinatrigger.py b/ooni/nettests/experimental/chinatrigger.py
new file mode 100644
index 0000000..de1f64d
--- /dev/null
+++ b/ooni/nettests/experimental/chinatrigger.py
@@ -0,0 +1,108 @@
+import random
+import string
+import struct
+import time
+
+from twisted.python import usage
+from ooni.templates.scapyt import BaseScapyTest
+
+class UsageOptions(usage.Options):
+    optParameters = [['dst', 'd', None, 'Specify the target address'],
+                     ['port', 'p', None, 'Specify the target port']
+                    ]
+
+class ChinaTriggerTest(BaseScapyTest):
+    """
+    This test is an OONI-based implementation of the C tool written
+    by Philipp Winter to engage Chinese probes in active scanning.
+
+    Example of running it:
+    ./bin/ooniprobe chinatrigger -d 127.0.0.1 -p 8080
+    """
+
+    name = "chinatrigger"
+    usageOptions = UsageOptions
+    requiredOptions = ['dst', 'port']
+    timeout = 2
+
+    def setUp(self):
+        self.dst = self.localOptions['dst']
+        self.port = int(self.localOptions['port'])
+
+    @staticmethod
+    def set_random_servername(pkt):
+        ret = pkt[:121]
+        for i in range(16):
+            ret += random.choice(string.ascii_lowercase)
+        ret += pkt[121+16:]
+        return ret
+
+    @staticmethod
+    def set_random_time(pkt):
+        ret = pkt[:11]
+        ret += struct.pack('!I', int(time.time()))
+        ret += pkt[11+4:]
+        return ret
+
+    @staticmethod
+    def set_random_field(pkt):
+        ret = pkt[:15]
+        for i in range(28):
+            ret += chr(random.randint(0, 255))
+        ret += pkt[15+28:]
+        return ret
+
+    @staticmethod
+    def mutate(pkt, idx):
+        """
+        Slightly changed mutate function.
+        """
+        ret = pkt[:idx-1]
+        mutation = chr(random.randint(0, 255))
+        while mutation == pkt[idx]:
+            mutation = chr(random.randint(0, 255))
+        ret += mutation
+        ret += pkt[idx:]
+        return ret
+
+    @staticmethod
+    def set_all_random_fields(pkt):
+        pkt = ChinaTriggerTest.set_random_servername(pkt)
+        pkt = ChinaTriggerTest.set_random_time(pkt)
+        pkt = ChinaTriggerTest.set_random_field(pkt)
+        return pkt
+
+    def test_send_mutations(self):
+        from scapy.all import IP, TCP
+        pkt = "\x16\x03\x01\x00\xcc\x01\x00\x00\xc8"\
+              "\x03\x01\x4f\x12\xe5\x63\x3f\xef\x7d"\
+              "\x20\xb9\x94\xaa\x04\xb0\xc1\xd4\x8c"\
+              "\x50\xcd\xe2\xf9\x2f\xa9\xfb\x78\xca"\
+              "\x02\xa8\x73\xe7\x0e\xa8\xf9\x00\x00"\
+              "\x3a\xc0\x0a\xc0\x14\x00\x39\x00\x38"\
+              "\xc0\x0f\xc0\x05\x00\x35\xc0\x07\xc0"\
+              "\x09\xc0\x11\xc0\x13\x00\x33\x00\x32"\
+              "\xc0\x0c\xc0\x0e\xc0\x02\xc0\x04\x00"\
+              "\x04\x00\x05\x00\x2f\xc0\x08\xc0\x12"\
+              "\x00\x16\x00\x13\xc0\x0d\xc0\x03\xfe"\
+              "\xff\x00\x0a\x00\xff\x01\x00\x00\x65"\
+              "\x00\x00\x00\x1d\x00\x1b\x00\x00\x18"\
+              "\x77\x77\x77\x2e\x67\x6e\x6c\x69\x67"\
+              "\x78\x7a\x70\x79\x76\x6f\x35\x66\x76"\
+              "\x6b\x64\x2e\x63\x6f\x6d\x00\x0b\x00"\
+              "\x04\x03\x00\x01\x02\x00\x0a\x00\x34"\
+              "\x00\x32\x00\x01\x00\x02\x00\x03\x00"\
+              "\x04\x00\x05\x00\x06\x00\x07\x00\x08"\
+              "\x00\x09\x00\x0a\x00\x0b\x00\x0c\x00"\
+              "\x0d\x00\x0e\x00\x0f\x00\x10\x00\x11"\
+              "\x00\x12\x00\x13\x00\x14\x00\x15\x00"\
+              "\x16\x00\x17\x00\x18\x00\x19\x00\x23"\
+              "\x00\x00"
+
+        pkt = ChinaTriggerTest.set_all_random_fields(pkt)
+        pkts = [IP(dst=self.dst)/TCP(dport=self.port)/pkt]
+        for x in range(len(pkt)):
+            mutation = IP(dst=self.dst)/TCP(dport=self.port)/ChinaTriggerTest.mutate(pkt, x)
+            pkts.append(mutation)
+        return self.sr(pkts, timeout=2)
+
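
For reference, test_send_mutations sends the captured ClientHello once unmodified and then once per byte position with a single byte randomised, so an N-byte payload yields N + 1 probes before they are wrapped in IP/TCP. A tiny sketch of that probe-set construction (the mutate argument stands in for ChinaTriggerTest.mutate):

    # Build the probe list used by test_send_mutations: the original payload
    # plus one single-byte mutation per position.
    def build_probes(payload, mutate):
        probes = [payload]
        for idx in range(len(payload)):
            probes.append(mutate(payload, idx))
        return probes
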
diff --git a/ooni/nettests/experimental/dns_injection.py b/ooni/nettests/experimental/dns_injection.py
new file mode 100644
index 0000000..97233cf
--- /dev/null
+++ b/ooni/nettests/experimental/dns_injection.py
@@ -0,0 +1,63 @@
+# -*- encoding: utf-8 -*-
+from twisted.python import usage
+from twisted.internet import defer
+
+from ooni.templates import dnst
+from ooni import nettest
+from ooni.utils import log
+
+class UsageOptions(usage.Options):
+    optParameters = [
+            ['resolver', 'r', '8.8.8.1', 'an invalid DNS resolver'],
+            ['timeout', 't', 3, 'timeout after which we should consider the query failed']
+    ]
+
+class DNSInjectionTest(dnst.DNSTest):
+    """
+    This test detects spoofed DNS responses by performing UDP-based DNS
+    queries towards an invalid DNS resolver.
+
+    For it to work we must be traversing the network segment of a machine that
+    is actively injecting DNS query answers.
+    """
+    name = "DNS Injection"
+    description = "Checks for injection of spoofed DNS answers"
+    version = "0.1"
+    authors = "Arturo Filastò"
+
+    inputFile = ['file', 'f', None,
+                 'Input file of list of hostnames to attempt to resolve']
+
+    usageOptions = UsageOptions
+    requiredOptions = ['resolver', 'file']
+
+    def setUp(self):
+        self.resolver = (self.localOptions['resolver'], 53)
+        self.queryTimeout = [self.localOptions['timeout']]
+
+    def inputProcessor(self, filename):
+        fp = open(filename)
+        for line in fp:
+            if line.startswith('http://'):
+                yield line.replace('http://', '').replace('/', '').strip()
+            else:
+                yield line.strip()
+        fp.close()
+
+    def test_injection(self):
+        self.report['injected'] = None
+
+        d = self.performALookup(self.input, self.resolver)
+        @d.addCallback
+        def cb(res):
+            log.msg("The DNS query for %s is injected" % self.input)
+            self.report['injected'] = True
+
+        @d.addErrback
+        def err(err):
+            err.trap(defer.TimeoutError)
+            log.msg("The DNS query for %s is not injected" % self.input)
+            self.report['injected'] = False
+
+        return d
+
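
Since the default resolver (8.8.8.1) is not expected to answer at all, the verdict above is simply "any reply means injection, a timeout means none". A rough stand-alone illustration of the same idea with a hand-built DNS A query over a plain UDP socket, outside the test's Twisted code path (resolver and timeout mirror the test's defaults):

    import socket, struct, random

    def injected(hostname, resolver=("8.8.8.1", 53), timeout=3):
        # Minimal DNS A query: header (ID, RD flag, QDCOUNT=1) + QNAME + QTYPE/QCLASS.
        qid = random.randint(0, 0xffff)
        header = struct.pack("!HHHHHH", qid, 0x0100, 1, 0, 0, 0)
        qname = "".join(chr(len(part)) + part for part in hostname.split("."))
        question = qname + "\x00" + struct.pack("!HH", 1, 1)
        s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
        s.settimeout(timeout)
        try:
            s.sendto(header + question, resolver)
            s.recvfrom(512)
            return True      # any answer from a non-answering resolver = injection
        except socket.timeout:
            return False     # the expected outcome on a clean path
        finally:
            s.close()
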
diff --git a/ooni/nettests/experimental/domclass_collector.py b/ooni/nettests/experimental/domclass_collector.py
new file mode 100644
index 0000000..c1866f2
--- /dev/null
+++ b/ooni/nettests/experimental/domclass_collector.py
@@ -0,0 +1,33 @@
+# -*- encoding: utf-8 -*-
+#
+# The purpose of this collector is to compute the eigenvector for the input
+# file containing a list of sites.
+#
+#
+# :authors: Arturo Filastò
+# :licence: see LICENSE
+
+from twisted.internet import threads, defer
+
+from ooni.kit import domclass
+from ooni.templates import httpt
+
+class DOMClassCollector(httpt.HTTPTest):
+    name = "DOM class collector"
+    author = "Arturo Filastò"
+    version = 0.1
+
+    followRedirects = True
+
+    inputFile = ['file', 'f', None, 'The list of urls to build a domclass for']
+
+    def test_collect(self):
+        if self.input:
+            url = self.input
+            return self.doRequest(url)
+        else:
+            raise Exception("No input specified")
+
+    def processResponseBody(self, body):
+        eigenvalues = domclass.compute_eigenvalues_from_DOM(content=body)
+        self.report['eigenvalues'] = eigenvalues.tolist()
diff --git a/ooni/nettests/experimental/http_filtering_bypassing.py b/ooni/nettests/experimental/http_filtering_bypassing.py
new file mode 100644
index 0000000..dc103db
--- /dev/null
+++ b/ooni/nettests/experimental/http_filtering_bypassing.py
@@ -0,0 +1,84 @@
+# -*- encoding: utf-8 -*-
+from twisted.python import usage
+
+from ooni.utils import log
+from ooni.utils import randomStr, randomSTR
+from ooni.templates import tcpt
+
+class UsageOptions(usage.Options):
+    optParameters = [['backend', 'b', '127.0.0.1',
+                        'The OONI backend that runs a TCP echo server'],
+                    ['backendport', 'p', 80, 'Specify the port that the TCP echo server is running on (should only be set for debugging)']]
+
+class HTTPFilteringBypass(tcpt.TCPTest):
+    name = "HTTPFilteringBypass"
+    version = "0.1"
+    authors = "xx"
+
+    inputFile = ['file', 'f', None,
+            'Specify a list of hostnames to use as inputs']
+
+    usageOptions = UsageOptions
+    requiredOptions = ['backend']
+
+    def setUp(self):
+        self.port = int(self.localOptions['backendport'])
+        self.address = self.localOptions['backend']
+
+    def check_for_manipulation(self, response, payload):
+        log.debug("Checking if %s == %s" % (response, payload))
+        if response != payload:
+            self.report['tampering'] = True
+        else:
+            self.report['tampering'] = False
+
+    def test_prepend_newline(self):
+        payload = "\nGET / HTTP/1.1\n\r"
+        payload += "Host: %s\n\r" % self.input
+
+        d = self.sendPayload(payload)
+        d.addCallback(self.check_for_manipulation, payload)
+        return d
+
+    def test_tab_trick(self):
+        payload = "GET / HTTP/1.1\n\r"
+        payload += "Host: %s\t\n\r" % self.input
+
+        d = self.sendPayload(payload)
+        d.addCallback(self.check_for_manipulation, payload)
+        return d
+
+    def test_subdomain_blocking(self):
+        payload = "GET / HTTP/1.1\n\r"
+        payload += "Host: %s\n\r" % randomStr(10) + '.' + self.input
+
+        d = self.sendPayload(payload)
+        d.addCallback(self.check_for_manipulation, payload)
+        return d
+
+    def test_fuzzy_domain_blocking(self):
+        hostname_field = randomStr(10) + '.' + self.input + '.' + randomStr(10)
+        payload = "GET / HTTP/1.1\n\r"
+        payload += "Host: %s\n\r" % hostname_field
+
+        d = self.sendPayload(payload)
+        d.addCallback(self.check_for_manipulation, payload)
+        return d
+
+    def test_fuzzy_match_blocking(self):
+        hostname_field = randomStr(10) + self.input + randomStr(10)
+        payload = "GET / HTTP/1.1\n\r"
+        payload += "Host: %s\n\r" % hostname_field
+
+        d = self.sendPayload(payload)
+        d.addCallback(self.check_for_manipulation, payload)
+        return d
+
+    def test_normal_request(self):
+        payload = "GET / HTTP/1.1\n\r"
+        payload += "Host: %s\n\r" % self.input
+
+        d = self.sendPayload(payload)
+        d.addCallback(self.check_for_manipulation, payload)
+        return d
+
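All of the test methods above share one heuristic: the payload is sent to a TCP echo backend, and any difference between what was sent and what came back is reported as tampering. A rough standalone sketch of that comparison, assuming a plain TCP echo service is listening on the placeholder address 127.0.0.1:80:

    import socket

    def check_echo(host, port, payload):
        # Send payload to a TCP echo service and report whether it came
        # back modified (a stand-in for check_for_manipulation above).
        s = socket.create_connection((host, port), timeout=5)
        try:
            s.sendall(payload)
            received = s.recv(len(payload) + 1024)
        finally:
            s.close()
        return received != payload

    if __name__ == '__main__':
        payload = "GET / HTTP/1.1\n\rHost: example.com\n\r"
        print "tampering:", check_echo('127.0.0.1', 80, payload)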
diff --git a/ooni/nettests/experimental/http_keyword_filtering.py b/ooni/nettests/experimental/http_keyword_filtering.py
new file mode 100644
index 0000000..0ae9c52
--- /dev/null
+++ b/ooni/nettests/experimental/http_keyword_filtering.py
@@ -0,0 +1,45 @@
+# -*- encoding: utf-8 -*-
+#
+# :authors: Arturo Filastò
+# :licence: see LICENSE
+
+from twisted.python import usage
+
+from ooni.templates import httpt
+
+class UsageOptions(usage.Options):
+    optParameters = [['backend', 'b', 'http://127.0.0.1:57001',
+                        'URL of the test backend to use']]
+
+class HTTPKeywordFiltering(httpt.HTTPTest):
+    """
+    This test involves performing HTTP requests whose bodies contain the
+    keywords to be tested for censorship.
+
+    It does not detect censorship on the client; it simply logs the response
+    from the HTTP backend server.
+    """
+    name = "HTTP Keyword Filtering"
+    author = "Arturo Filastò"
+    version = "0.1.1"
+
+    inputFile = ['file', 'f', None, 'List of keywords to use for censorship testing']
+
+    usageOptions = UsageOptions
+
+    requiredOptions = ['backend']
+
+    def test_get(self):
+        """
+        Perform an HTTP GET request to the backend with the keyword to be
+        tested inside the request body.
+        """
+        return self.doRequest(self.localOptions['backend'], method="GET", body=self.input)
+
+    def test_post(self):
+        """
+        Perform an HTTP POST request to the backend with the keyword to be
+        tested inside the request body.
+        """
+        return self.doRequest(self.localOptions['backend'], method="POST", body=self.input)
+
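Both test methods boil down to "same keyword, two HTTP methods, keyword carried in the request body". A standalone sketch of the same requests against the test's default backend address (127.0.0.1:57001), using only the standard library; the keyword is a placeholder:

    import httplib

    def probe_keyword(host, port, keyword):
        # Send the keyword in the body of a GET and a POST request and
        # return the status code seen for each method.
        results = {}
        for method in ('GET', 'POST'):
            conn = httplib.HTTPConnection(host, port, timeout=10)
            conn.request(method, '/', body=keyword)
            results[method] = conn.getresponse().status
            conn.close()
        return results

    if __name__ == '__main__':
        print probe_keyword('127.0.0.1', 57001, 'some-keyword')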
diff --git a/ooni/nettests/experimental/http_trix.py b/ooni/nettests/experimental/http_trix.py
new file mode 100644
index 0000000..85a4ba2
--- /dev/null
+++ b/ooni/nettests/experimental/http_trix.py
@@ -0,0 +1,47 @@
+# -*- encoding: utf-8 -*-
+from twisted.python import usage
+
+from ooni.utils import log
+from ooni.utils import randomStr, randomSTR
+from ooni.templates import tcpt
+
+class UsageOptions(usage.Options):
+    optParameters = [['backend', 'b', '127.0.0.1',
+                        'The OONI backend that runs a TCP echo server'],
+                    ['backendport', 'p', 80, 'Specify the port on which the TCP echo server is running (should only be set for debugging)']]
+
+class HTTPTrix(tcpt.TCPTest):
+    name = "HTTPTrix"
+    version = "0.1"
+    authors = "Arturo Filastò"
+
+    usageOptions = UsageOptions
+    requiredOptions = ['backend']
+
+    def setUp(self):
+        self.port = int(self.localOptions['backendport'])
+        self.address = self.localOptions['backend']
+
+    def check_for_manipulation(self, response, payload):
+        log.debug("Checking if %s == %s" % (response, payload))
+        if response != payload:
+            self.report['tampering'] = True
+        else:
+            self.report['tampering'] = False
+
+    def test_for_squid_cache_object(self):
+        """
+        This test detects the presence of a Squid transparent HTTP proxy by
+        sending a request for cache_object://localhost/info.
+
+        That is, it sends the raw request:
+
+            GET cache_object://localhost/info HTTP/1.1
+        """
+        payload = 'GET cache_object://localhost/info HTTP/1.1'
+        payload += '\n\r'
+
+        d = self.sendPayload(payload)
+        d.addCallback(self.check_for_manipulation, payload)
+        return d
+
diff --git a/ooni/nettests/experimental/http_uk_mobile_networks.py b/ooni/nettests/experimental/http_uk_mobile_networks.py
new file mode 100644
index 0000000..784a9e9
--- /dev/null
+++ b/ooni/nettests/experimental/http_uk_mobile_networks.py
@@ -0,0 +1,85 @@
+# -*- encoding: utf-8 -*-
+import re
+
+import yaml
+
+from twisted.python import usage
+from twisted.plugin import IPlugin
+
+from ooni.templates import httpt
+from ooni.utils import log
+
+class UsageOptions(usage.Options):
+    """
+    See https://github.com/hellais/ooni-inputs/processed/uk_mobile_networks_redirects.yaml
+    for an example of what the rules file should look like.
+    """
+    optParameters = [
+                     ['rules', 'y', None, 
+                    'Specify the redirect rules file ']
+                    ]
+
+class HTTPUKMobileNetworksTest(httpt.HTTPTest):
+    """
+    This test was conceived by the Open Rights Group and implemented for the
+    purpose of detecting censorship in the UK.
+    For more details on this test see:
+    https://trac.torproject.org/projects/tor/ticket/6437
+    XXX port the knowledge from the trac ticket into this test docstring
+    """
+    name = "HTTP UK mobile network redirect test"
+
+    usageOptions = UsageOptions
+
+    followRedirects = True
+
+    inputFile = ['urls', 'f', None, 'List of urls one per line to test for censorship']
+    requiredOptions = ['urls']
+
+    def testPattern(self, value, pattern, pattern_type):
+        if pattern_type == 'eq':
+            return value == pattern
+        elif pattern_type == 're':
+            return bool(re.match(pattern, value))
+        else:
+            return None
+
+    def testPatterns(self, patterns, location):
+        test_result = False
+
+        if isinstance(patterns, list):
+            for pattern in patterns:
+                test_result |= self.testPattern(location, pattern['value'], pattern['type'])
+
+        return test_result
+
+    def testRules(self, rules, location):
+        result = {}
+        blocked = False
+        for rule, value in rules.items():
+            current_rule = {}
+            current_rule['name'] = value['name']
+            current_rule['patterns'] = value['patterns']
+            current_rule['test'] = self.testPatterns(value['patterns'], location)
+            blocked |= current_rule['test']
+            result[rule] = current_rule
+        result['blocked'] = blocked
+        return result
+
+    def processRedirect(self, location):
+        self.report['redirect'] = None
+        rules_file = self.localOptions['rules']
+
+        fp = open(rules_file)
+        rules = yaml.safe_load(fp)
+        fp.close()
+
+        log.msg("Testing rules %s" % rules)
+        redirect = self.testRules(rules, location)
+        self.report['redirect'] = redirect
+
+
+
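The exact published rules live in the ooni-inputs repository referenced in the UsageOptions docstring. Structurally, testRules() and testPatterns() expect the YAML to load into a mapping of rule ids to a name plus a list of patterns, each pattern having a 'type' of 'eq' or 're' and a 'value'. A hypothetical example of what yaml.safe_load() should return (operator name, URLs and patterns are made up):

    rules = {
        'example_operator_block_page': {
            'name': 'Example operator adult-content block page',
            'patterns': [
                {'type': 'eq', 'value': 'http://blocked.example.net/'},
                {'type': 're', 'value': r'^https?://.*\.example\.net/blocked'},
            ],
        },
    }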
diff --git a/ooni/nettests/experimental/keyword_filtering.py b/ooni/nettests/experimental/keyword_filtering.py
new file mode 100644
index 0000000..9eec4ff
--- /dev/null
+++ b/ooni/nettests/experimental/keyword_filtering.py
@@ -0,0 +1,52 @@
+# -*- encoding: utf-8 -*-
+#
+# :authors: Arturo Filastò
+# :licence: see LICENSE
+
+from twisted.python import usage
+from twisted.internet import defer
+
+from ooni.utils import log
+from ooni.templates import scapyt
+
+from scapy.all import *
+
+class UsageOptions(usage.Options):
+    optParameters = [
+                    ['backend', 'b', '127.0.0.1:57002', 'Test backend running TCP echo'],
+                    ['timeout', 't', 5, 'Timeout after which to give up waiting for RST packets']
+                    ]
+
+class KeywordFiltering(scapyt.BaseScapyTest):
+    name = "Keyword Filtering detection based on RST packets"
+    author = "Arturo Filastò"
+    version = "0.1"
+
+    usageOptions = UsageOptions
+
+    inputFile = ['file', 'f', None, 
+            'List of keywords to use for censorship testing']
+
+    def test_tcp_keyword_filtering(self):
+        """
+        Places the keyword to be tested in the payload of a TCP packet.
+        XXX need to implement bisection method for enumerating keywords.
+            though this should not be an issue since we are testing all 
+            the keywords in parallel.
+        """
+        def finished(packets):
+            log.debug("Finished running TCP traceroute test on port %s" % port)
+            answered, unanswered = packets
+            self.report['rst_packets'] = []
+            for snd, rcv in answered:
+                # The received packet has the RST flag
+                if rcv[TCP].flags == 4:
+                    self.report['rst_packets'].append(rcv)
+
+        backend_ip, backend_port = self.localOptions['backend'].split(':')
+        backend_port = int(backend_port)
+        timeout = int(self.localOptions['timeout'])
+        keyword_to_test = str(self.input)
+        packets = IP(dst=backend_ip, id=RandShort())/TCP(dport=backend_port)/keyword_to_test
+        d = self.sr(packets, timeout=timeout)
+        d.addCallback(finished)
+        return d
+
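The detection signal here is simply "did any reply to the keyword-carrying TCP packet have the RST flag set". A standalone sketch of that check (requires root; the backend address 127.0.0.1:57002 is the test's default, used here as a placeholder):

    from scapy.all import IP, TCP, RandShort, sr

    def rst_seen(backend_ip, backend_port, keyword, timeout=5):
        # Send the keyword as a TCP payload and report whether any answer
        # carries the RST flag (0x04) -- the signal the test looks for.
        pkt = IP(dst=backend_ip, id=RandShort())/TCP(dport=backend_port)/keyword
        answered, unanswered = sr(pkt, timeout=timeout)
        return any(rcv.haslayer(TCP) and rcv[TCP].flags & 0x04
                   for snd, rcv in answered)

    if __name__ == '__main__':
        print rst_seen('127.0.0.1', 57002, 'some-keyword')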
diff --git a/ooni/nettests/experimental/parasitictraceroute.py b/ooni/nettests/experimental/parasitictraceroute.py
new file mode 100644
index 0000000..631c24b
--- /dev/null
+++ b/ooni/nettests/experimental/parasitictraceroute.py
@@ -0,0 +1,129 @@
+# -*- encoding: utf-8 -*-
+#
+# :authors: Arturo Filastò
+# :licence: see LICENSE
+
+import random
+import socket
+
+from twisted.python import usage
+from twisted.internet import defer
+
+from ooni.templates import scapyt
+
+from scapy.all import *
+
+from ooni.utils import log
+
+class UsageOptions(usage.Options):
+    optParameters = [['backend', 'b', 'google.com', 'Test backend to use'],
+                    ['timeout', 't', 5, 'The timeout for the traceroute test'],
+                    ['maxttl', 'm', 64, 'The maximum value of ttl to set on packets'],
+                    ['dstport', 'd', 80, 'Set the destination port of the traceroute test'],
+                    ['srcport', 'p', None, 'Set the source port to a specific value']]
+
+class ParasiticalTracerouteTest(scapyt.BaseScapyTest):
+    name = "Parasitic TCP Traceroute Test"
+    author = "Arturo Filastò"
+    version = "0.1"
+
+    usageOptions = UsageOptions
+
+    def setUp(self):
+        def get_sport():
+            if self.localOptions['srcport']:
+                return int(self.localOptions['srcport'])
+            else:
+                return random.randint(1024, 65535)
+        self.get_sport = get_sport
+
+        self.dst_ip = socket.gethostbyname(self.localOptions['backend'])
+
+        self.dport = int(self.localOptions['dstport'])
+        self.max_ttl = int(self.localOptions['maxttl'])
+
+    @defer.inlineCallbacks
+    def test_parasitic_tcp_traceroute(self):
+        """
+        Establishes a TCP stream, then sequentially sends TCP packets with
+        increasing TTL until we reach the ttl of the destination.
+
+        Requires the backend to respond with an ACK to our SYN packet (i.e.
+        the port must be open)
+
+        XXX this currently does not work properly. The problem lies in the fact
+        that we are currently using the scapy layer 3 socket. This socket makes
+        packets received be trapped by the kernel TCP stack, therefore when we
+        send out a SYN and get back a SYN-ACK the kernel stack will reply with
+        a RST because it did not send a SYN.
+
+        The quick fix to this would be to establish a TCP stream using socket
+        calls and then "cannibalizing" the TCP session with scapy.
+
+        The real fix is to make scapy use libpcap instead of raw sockets
+        obviously as we previously did... arg.
+        """
+        sport = self.get_sport()
+        dport = self.dport
+        ipid = int(RandShort())
+
+        ip_layer = IP(dst=self.dst_ip,
+                id=ipid, ttl=self.max_ttl)
+
+        syn = ip_layer/TCP(sport=sport, dport=dport, flags="S", seq=0)
+
+        log.msg("Sending...")
+        syn.show2()
+
+        synack = yield self.sr1(syn)
+
+        if not synack:
+            log.err("Got no response. Try increasing max_ttl")
+            return
+
+        log.msg("Got response...")
+        synack.show2()
+
+        # TCP flags: 0x14 (RST ACK) means the port is closed,
+        # 0x12 (SYN ACK) means it is open.
+        if synack[TCP].flags == 20:
+            log.msg("Got back a RST ACK. The destination port is closed")
+            return
+
+        elif synack[TCP].flags == 18:
+            log.msg("Got a SYN ACK. All is well.")
+        else:
+            log.err("Got an unexpected result")
+            return
+
+        ack = ip_layer/TCP(sport=synack.dport,
+                            dport=dport, flags="A",
+                            seq=synack.ack, ack=synack.seq + 1)
+
+        yield self.send(ack)
+
+        self.report['hops'] = []
+        # For the time being we make the assumption that we are NATted and
+        # that the NAT will forward the packet to the destination even if the TTL has
+        # expired.
+        for ttl in range(1, self.max_ttl):
+            log.msg("Sending packet with ttl of %s" % ttl)
+            ip_layer.ttl = ttl
+            empty_tcp_packet = ip_layer/TCP(sport=synack.dport,
+                    dport=dport, flags="A",
+                    seq=synack.ack, ack=synack.seq + 1)
+
+            answer = yield self.sr1(empty_tcp_packet)
+            if not answer:
+                log.err("Got no response for ttl %s" % ttl)
+                continue
+
+            try:
+                icmp = answer[ICMP]
+                report = {'ttl': empty_tcp_packet.ttl,
+                    'address': answer.src,
+                    'rtt': answer.time - empty_tcp_packet.time
+                }
+                log.msg("%s: %s" % (dport, report))
+                self.report['hops'].append(report)
+
+            except IndexError:
+                if answer.src == self.dst_ip:
+                    answer.show()
+                    log.msg("Reached the destination. We have finished the traceroute")
+                    return
+
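The per-TTL loop above distinguishes two kinds of replies: an ICMP time-exceeded message names an intermediate hop, while a packet sourced from the destination means the traceroute is finished. A small sketch of that classification step on its own (function name and return values are illustrative, not part of the test):

    from scapy.all import ICMP, IP

    def classify_reply(answer, dst_ip, sent_ttl):
        # Classify a reply to a TTL-limited probe.
        if answer is None:
            return ('no-reply', sent_ttl, None)
        if answer.haslayer(ICMP) and answer[ICMP].type == 11:
            # ICMP type 11 is time-exceeded: an intermediate hop.
            return ('hop', sent_ttl, answer[IP].src)
        if answer[IP].src == dst_ip:
            return ('destination', sent_ttl, dst_ip)
        return ('other', sent_ttl, answer[IP].src)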
diff --git a/ooni/nettests/experimental/script.py b/ooni/nettests/experimental/script.py
new file mode 100644
index 0000000..4772f65
--- /dev/null
+++ b/ooni/nettests/experimental/script.py
@@ -0,0 +1,90 @@
+from ooni import nettest
+from ooni.utils import log
+from twisted.internet import defer, protocol, reactor
+from twisted.python import usage
+
+import os
+
+
+def which(program):
+    def is_exe(fpath):
+        return os.path.isfile(fpath) and os.access(fpath, os.X_OK)
+
+    fpath, fname = os.path.split(program)
+    if fpath:
+        if is_exe(program):
+            return program
+    else:
+        for path in os.environ["PATH"].split(os.pathsep):
+            path = path.strip('"')
+            exe_file = os.path.join(path, program)
+            if is_exe(exe_file):
+                return exe_file
+    return None
+
+
+class UsageOptions(usage.Options):
+    optParameters = [
+        ['interpreter', 'i', '', 'The interpreter to use'],
+        ['script', 's', '', 'The script to run']
+    ]
+
+
+class ScriptProcessProtocol(protocol.ProcessProtocol):
+    def __init__(self, test_case):
+        self.test_case = test_case
+        self.deferred = defer.Deferred()
+
+    def connectionMade(self):
+        log.debug("connectionMade")
+        self.transport.closeStdin()
+        self.test_case.report['lua_output'] = ""
+
+    def outReceived(self, data):
+        log.debug('outReceived: %s' % data)
+        self.test_case.report['lua_output'] += data
+
+    def errReceived(self, data):
+        log.err('Script error: %s' % data)
+        self.transport.signalProcess('KILL')
+
+    def processEnded(self, status):
+        rc = status.value.exitCode
+        log.debug('processEnded: %s, %s' % \
+                  (rc, self.test_case.report['lua_output']))
+        if rc == 0:
+            self.deferred.callback(self)
+        else:
+            self.deferred.errback(Exception("Script exited with status %s" % rc))
+
+
+# TODO: Maybe the script requires a back-end.
+class Script(nettest.NetTestCase):
+    name = "Script test"
+    version = "0.1"
+    authors = "Dominic Hamon"
+
+    usageOptions = UsageOptions
+    requiredOptions = ['interpreter', 'script']
+
+    def test_run_script(self):
+        """
+        We run the script specified in the usage options and take whatever
+        is printed to stdout as the results of the test.
+        """
+        processProtocol = ScriptProcessProtocol(self)
+
+        interpreter = self.localOptions['interpreter']
+        if not which(interpreter):
+            log.err('Unable to find %s executable in PATH.' % interpreter)
+            return
+
+        reactor.spawnProcess(processProtocol,
+                             interpreter,
+                             args=[interpreter, self.localOptions['script']],
+                             env={'HOME': os.environ['HOME']},
+                             usePTY=True)
+
+        if not reactor.running:
+            reactor.run()
+        return processProtocol.deferred
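The test's contract is simply "run <interpreter> <script> and treat whatever it prints to stdout as the result". The same thing can be expressed more compactly with Twisted's process utilities; a sketch, with placeholder paths:

    import os
    import sys

    from twisted.internet import reactor, utils

    def run_script(interpreter, script):
        # Run "<interpreter> <script>" and collect its stdout, the same
        # data the Script test stores in report['lua_output'].
        return utils.getProcessOutput(interpreter, args=(script,),
                                      env={'HOME': os.environ['HOME']})

    if __name__ == '__main__':
        d = run_script('/usr/bin/lua', '/tmp/probe.lua')
        d.addCallback(sys.stdout.write)
        d.addBoth(lambda _: reactor.stop())
        reactor.run()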
diff --git a/ooni/nettests/experimental/squid.py b/ooni/nettests/experimental/squid.py
new file mode 100644
index 0000000..777bc3e
--- /dev/null
+++ b/ooni/nettests/experimental/squid.py
@@ -0,0 +1,117 @@
+# -*- encoding: utf-8 -*-
+#
+# Squid transparent HTTP proxy detector
+# *************************************
+#
+# :authors: Arturo Filastò
+# :licence: see LICENSE
+
+import re
+
+from ooni import utils
+from ooni.utils import log
+from ooni.templates import httpt
+
+class SquidTest(httpt.HTTPTest):
+    """
+    This test aims at detecting the presence of a squid based transparent HTTP
+    proxy. It also tries to detect the version number.
+    """
+    name = "Squid test"
+    author = "Arturo Filastò"
+    version = "0.1"
+
+    optParameters = [['backend', 'b', 'http://ooni.nu/test/', 'Test backend to use']]
+
+    #inputFile = ['urls', 'f', None, 'Urls file']
+    inputs = ['http://google.com']
+
+    def test_cacheobject(self):
+        """
+        This detects the presence of a squid transparent HTTP proxy by sending
+        a request for cache_object://localhost/info.
+
+        The response to this request will usually also contain the squid
+        version number.
+        """
+        log.debug("Running")
+        def process_body(body):
+            if "Access Denied." in body:
+                self.report['transparent_http_proxy'] = True
+            else:
+                self.report['transparent_http_proxy'] = False
+
+        log.msg("Testing Squid proxy presence by sending a request for "\
+                "cache_object")
+        headers = {}
+        #headers["Host"] = [self.input]
+        self.report['transparent_http_proxy'] = None
+        method = "GET"
+        body = "cache_object://localhost/info"
+        return self.doRequest(self.localOptions['backend'], method=method, body=body,
+                        headers=headers, body_processor=process_body)
+
+    def test_search_bad_request(self):
+        """
+        Attempts to perform a request with a random invalid HTTP method.
+
+        If we are being MITMed by a Transparent Squid HTTP proxy we will get
+        back a response containing the X-Squid-Error header.
+        """
+        def process_headers(headers):
+            log.debug("Processing headers in test_search_bad_request")
+            if 'X-Squid-Error' in headers:
+                log.msg("Detected the presence of a transparent HTTP "\
+                        "squid proxy")
+                self.report['transparent_http_proxy'] = True
+            else:
+                log.msg("Did not detect the presence of transparent HTTP "\
+                        "squid proxy")
+                self.report['transparent_http_proxy'] = False
+
+        log.msg("Testing Squid proxy presence by sending a random bad request")
+        headers = {}
+        #headers["Host"] = [self.input]
+        method = utils.randomSTR(10, True)
+        self.report['transparent_http_proxy'] = None
+        return self.doRequest(self.localOptions['backend'], method=method,
+                        headers=headers, headers_processor=process_headers)
+
+    def test_squid_headers(self):
+        """
+        Detects the presence of a squid transparent HTTP proxy based on the
+        response headers it adds to the responses to requests.
+        """
+        def process_headers(headers):
+            """
+            Checks if any of the headers that squid is known to add match the
+            squid regexp.
+
+            We are looking for something that looks like this:
+
+                via: 1.0 cache_server:3128 (squid/2.6.STABLE21)
+                x-cache: MISS from cache_server
+                x-cache-lookup: MISS from cache_server:3128
+            """
+            squid_headers = {'via': r'.* \((squid.*)\)',
+                        'x-cache': r'MISS from (\w+)',
+                        'x-cache-lookup': r'MISS from (\w+:?\d+?)'
+                        }
+
+            self.report['transparent_http_proxy'] = False
+            for key in squid_headers.keys():
+                if key in headers:
+                    log.debug("Found %s in headers" % key)
+                    m = re.search(squid_headers[key], headers[key])
+                    if m:
+                        log.msg("Detected the presence of squid transparent"\
+                                " HTTP Proxy")
+                        self.report['transparent_http_proxy'] = True
+
+        log.msg("Testing Squid proxy by looking at response headers")
+        headers = {}
+        #headers["Host"] = [self.input]
+        method = "GET"
+        self.report['transparent_http_proxy'] = None
+        d = self.doRequest(self.localOptions['backend'], method=method,
+                        headers=headers, headers_processor=process_headers)
+        return d
+
+
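The header fingerprinting in test_squid_headers() is a plain re.search over a handful of response headers. Run against the example values quoted in its docstring, it behaves like this (illustrative only):

    import re

    squid_headers = {'via': r'.* \((squid.*)\)',
                     'x-cache': r'MISS from (\w+)',
                     'x-cache-lookup': r'MISS from (\w+:?\d+?)'}

    sample = {'via': '1.0 cache_server:3128 (squid/2.6.STABLE21)',
              'x-cache': 'MISS from cache_server',
              'x-cache-lookup': 'MISS from cache_server:3128'}

    for name, pattern in squid_headers.items():
        m = re.search(pattern, sample[name])
        print name, '->', m.group(1) if m else None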
diff --git a/ooni/nettests/experimental/tls_handshake.py b/ooni/nettests/experimental/tls_handshake.py
new file mode 100644
index 0000000..5da2e8b
--- /dev/null
+++ b/ooni/nettests/experimental/tls_handshake.py
@@ -0,0 +1,809 @@
+#!/usr/bin/env python
+# -*- encoding: utf-8 -*-
+"""
+  tls_handshake.py
+  ----------------
+
+  This file contains test cases for determining if a TLS handshake completes
+  successfully, including ways to test if a TLS handshake which uses Mozilla
+  Firefox's current ciphersuite list completes. Rather than using Twisted and
+  OpenSSL's methods for automatically completing a handshake, which includes
+  setting all the parameters, such as the ciphersuite list, these tests use
+  non-blocking sockets and implement asynchronous error-handling traversal of
+  OpenSSL's memory BIO state machine, allowing us to determine where and why a
+  handshake fails.
+
+  This network test is a complete rewrite of a pseudonymously contributed
+  script by Hackerberry Finn, in order to fit into OONI's core network tests.
+
+  @authors: Isis Agora Lovecruft <isis@xxxxxxxxxxxxxx>
+  @license: see included LICENSE file
+  @copyright: © 2013 Isis Lovecruft, The Tor Project Inc.
+"""
+
+from socket import error   as socket_error
+from socket import timeout as socket_timeout
+from time   import sleep
+
+import os
+import socket
+import struct
+import sys
+import types
+
+import ipaddr
+import OpenSSL
+
+from OpenSSL                import SSL, crypto
+from twisted.internet       import defer, threads
+from twisted.python         import usage, failure
+
+from ooni       import nettest, config
+from ooni.utils import log
+from ooni.errors import InsufficientPrivileges
+
+## For a way to obtain the current version of Firefox's default ciphersuite
+## list, see https://trac.torproject.org/projects/tor/attachment/ticket/4744/
+## and the attached file "get_mozilla_files.py".
+##
+## Note, however, that doing so requires the source code to the version of
+## firefox that you wish to emulate.
+
+firefox_ciphers = ["ECDHE-ECDSA-AES256-SHA",
+                   "ECDHE-RSA-AES256-SHA",
+                   "DHE-RSA-CAMELLIA256-SHA",
+                   "DHE-DSS-CAMELLIA256-SHA",
+                   "DHE-RSA-AES256-SHA",
+                   "DHE-DSS-AES256-SHA",
+                   "ECDH-ECDSA-AES256-CBC-SHA",
+                   "ECDH-RSA-AES256-CBC-SHA",
+                   "CAMELLIA256-SHA",
+                   "AES256-SHA",
+                   "ECDHE-ECDSA-RC4-SHA",
+                   "ECDHE-ECDSA-AES128-SHA",
+                   "ECDHE-RSA-RC4-SHA",
+                   "ECDHE-RSA-AES128-SHA",
+                   "DHE-RSA-CAMELLIA128-SHA",
+                   "DHE-DSS-CAMELLIA128-SHA",]
+
+
+class SSLContextError(usage.UsageError):
+    """Raised when we're missing the SSL context method, or incompatible
+    contexts were provided. The SSL context method should be one of the
+    following:
+
+        :attr:`OpenSSL.SSL.SSLv2_METHOD <OpenSSL.SSL.SSLv2_METHOD>`
+        :attr:`OpenSSL.SSL.SSLv23_METHOD <OpenSSL.SSL.SSLv23_METHOD>`
+        :attr:`OpenSSL.SSL.SSLv3_METHOD <OpenSSL.SSL.SSLv3_METHOD>`
+        :attr:`OpenSSL.SSL.TLSv1_METHOD <OpenSSL.SSL.TLSv1_METHOD>`
+
+    To use the pre-defined error messages, construct with one of the
+    :meth:`SSLContextError.errors.keys <keys>` as the ``message`` string, like
+    so:
+
+        ``SSLContextError('NO_CONTEXT')``
+    """
+
+    #: Pre-defined error messages.
+    errors = {
+        'NO_CONTEXT': 'No SSL/TLS context chosen! Defaulting to TLSv1.',
+        'INCOMPATIBLE': str("Testing TLSv1 (option '--tls1') is incompatible "
+                            + "with testing SSL ('--ssl2' and '--ssl3')."),
+        'MISSING_SSLV2': str("Your version of OpenSSL was compiled without "
+                             + "support for SSLv2. This is normal on newer "
+                             + "versions of OpenSSL, but it means that you "
+                             + "will be unable to test SSLv2 handshakes "
+                             + "without recompiling OpenSSL."), }
+
+    def __init__(self, message):
+        if message in self.errors.keys():
+            message = self.errors[message]
+        super(SSLContextError, self).__init__(message)
+
+class HostUnreachable(Exception):
+    """Raised when the host IP address appears to be unreachable."""
+    pass
+
+class ConnectionTimeout(Exception):
+    """Raised when we receive a :class:`socket.timeout <timeout>`, in order to
+    pass the Exception along to
+    :func:`TLSHandshakeTest.test_handshake.connectionFailed
+    <connectionFailed>`.
+    """
+    pass
+
+class HandshakeOptions(usage.Options):
+    """ :class:`usage.Options <Options>` parser for the tls-handshake test."""
+    optParameters = [
+        ['host', 'h', None,
+         'Remote host IP address (v4/v6) and port, i.e. "1.2.3.4:443"'],
+        ['port', 'p', None,
+         'Use this port for all hosts, regardless of port specified in file'],
+        ['ciphersuite', 'c', None ,
+         'File containing ciphersuite list, one per line'],]
+    optFlags = [
+        ['ssl2', '2', 'Use SSLv2'],
+        ['ssl3', '3', 'Use SSLv3'],
+        ['tls1', 't', 'Use TLSv1'],]
+
+class HandshakeTest(nettest.NetTestCase):
+    """An ooniprobe NetTestCase for determining if we can complete a TLS/SSL
+    handshake with a remote host.
+    """
+    name         = 'tls-handshake'
+    author       = 'Isis Lovecruft <isis@xxxxxxxxxxxxxx>'
+    description  = 'A test to determine if we can complete a TLS handshake.'
+    version      = '0.0.3'
+
+    requiresRoot = False
+    usageOptions = HandshakeOptions
+
+    host = None
+    inputFile = ['file', 'f', None, 'List of <IP>:<PORT>s to test']
+
+    #: Default SSL/TLS context method.
+    context = SSL.Context(SSL.TLSv1_METHOD)
+
+    def setUp(self, *args, **kwargs):
+        """Set defaults for a :class:`HandshakeTest <HandshakeTest>`."""
+
+        self.ciphers = list()
+
+        if self.localOptions:
+            options = self.localOptions
+
+            ## check that we're testing an IP:PORT, else exit gracefully:
+            if not (options['host']  or options['file']):
+                raise SystemExit("Need --host or --file!")
+            if options['host']:
+                self.host = options['host']
+
+            ## If no context was chosen, explain our default to the user:
+            context = None
+            if not (options['ssl2'] or options['ssl3'] or options['tls1']):
+                try: raise SSLContextError('NO_CONTEXT')
+                except SSLContextError as sce: log.err(sce.message)
+            else:
+                ## If incompatible contexts were chosen, inform the user:
+                if options['tls1'] and (options['ssl2'] or options['ssl3']):
+                    try: raise SSLContextError('INCOMPATIBLE')
+                    except SSLContextError as sce: log.err(sce.message)
+                    finally: log.msg('Defaulting to testing only TLSv1.')
+                elif options['ssl2']:
+                    try:
+                        if not options['ssl3']:
+                            context = SSL.Context(SSL.SSLv2_METHOD)
+                        else:
+                            context = SSL.Context(SSL.SSLv23_METHOD)
+                    except ValueError as ve:
+                        log.err(ve.message)
+                        try: raise SSLContextError('MISSING_SSLV2')
+                        except SSLContextError as sce:
+                            log.err(sce.message)
+                            log.msg("Falling back to testing only TLSv1.")
+                            context = SSL.Context(SSL.TLSv1_METHOD)
+                elif options['ssl3']:
+                    context = SSL.Context(SSL.SSLv3_METHOD)
+            ## finally, reset the context if the user's choice was okay:
+            if context: self.context = context
+
+            ## if we weren't given a file with a list of ciphersuites to use,
+            ## then use the firefox default list:
+            if not options['ciphersuite']:
+                self.ciphers = firefox_ciphers
+                log.msg('Using default Firefox ciphersuite list.')
+            else:
+                if os.path.isfile(options['ciphersuite']):
+                    log.msg('Using ciphersuite list from "%s"'
+                            % options['ciphersuite'])
+                    with open(options['ciphersuite']) as cipherfile:
+                        for line in cipherfile.readlines():
+                            self.ciphers.append(line.strip())
+            self.ciphersuite = ":".join(self.ciphers)
+
+        if getattr(config.advanced, 'default_timeout', None) is not None:
+            self.timeout = config.advanced.default_timeout
+        else:
+            self.timeout = 30   ## default the timeout to 30 seconds
+
+        ## xxx For debugging, set the socket timeout higher anyway:
+        self.timeout = 30
+
+        ## We have to set the default timeout on our sockets before creation:
+        socket.setdefaulttimeout(self.timeout)
+
+    def splitInput(self, input):
+        addr, port = input.strip().rsplit(':', 1)
+        if self.localOptions['port']:
+            port = self.localOptions['port']
+        return (str(addr), int(port))
+
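+    ## The input file is expected to contain one <address>:<port> pair per
+    ## line; lines starting with '#' are skipped, and a port given with
+    ## --port overrides the per-line port. Hypothetical example contents:
+    ##
+    ##     # TEST-NET examples
+    ##     192.0.2.10:443
+    ##     2001:db8::1:443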
+    def inputProcessor(self, file=None):
+        if self.host:
+            yield self.splitInput(self.host)
+        if file and os.path.isfile(file):
+            with open(file) as fh:
+                for line in fh.readlines():
+                    if line.startswith('#'):
+                        continue
+                    yield self.splitInput(line)
+
+    def buildSocket(self, addr):
+        ip = ipaddr.IPAddress(addr) ## learn if we're IPv4 or IPv6
+        if ip.version == 4:
+            s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
+        elif ip.version == 6:
+            s = socket.socket(socket.AF_INET6, socket.SOCK_STREAM)
+        return s
+
+    def getContext(self):
+        self.context.set_cipher_list(self.ciphersuite)
+        return self.context
+
+    @staticmethod
+    def getPeerCert(connection, get_chain=False):
+        """Get the PEM-encoded certificate or cert chain of the remote host.
+
+        :param connection: A :class:`OpenSSL.SSL.Connection <Connection>`.
+        :param bool get_chain: If True, get the all certificates in the
+            chain. Otherwise, only get the remote host's certificate.
+        :returns: A PEM-encoded x509 certificate. If
+            :param:`getPeerCert.get_chain <get_chain>` is True, returns a list
+            of PEM-encoded x509 certificates.
+        """
+        if not get_chain:
+            x509_cert = connection.get_peer_certificate()
+            pem_cert = crypto.dump_certificate(crypto.FILETYPE_PEM, x509_cert)
+            return pem_cert
+        else:
+            cert_chain = []
+            x509_cert_chain = connection.get_peer_cert_chain()
+            for x509_cert in x509_cert_chain:
+                pem_cert = crypto.dump_certificate(crypto.FILETYPE_PEM,
+                                                   x509_cert)
+                cert_chain.append(pem_cert)
+            return cert_chain
+
+    @staticmethod
+    def getX509Name(certificate, get_components=False):
+        """Get the DER-encoded form of the Name fields of an X509 certificate.
+
+        @param certificate: A :class:`OpenSSL.crypto.X509Name` object.
+        @param get_components: A boolean. If True, returns a list of tuples of
+                               the (name, value)s of each Name field in the
+                               :param:`certificate`. If False, returns the DER
+                               encoded form of the Name fields of the
+                               :param:`certificate`.
+        """
+        x509_name = None
+
+        try:
+            assert isinstance(certificate, crypto.X509Name), \
+                "getX509Name takes OpenSSL.crypto.X509Name as first argument!"
+            x509_name = crypto.X509Name(certificate)
+        except AssertionError as ae:
+            log.err(ae)
+        except Exception as exc:
+            log.exception(exc)
+
+        if not x509_name is None:
+            if not get_components:
+                return x509_name.der()
+            else:
+                return x509_name.get_components()
+        else:
+            log.debug("getX509Name: got None for ivar x509_name")
+
+    @staticmethod
+    def getPublicKey(key):
+        """Get the PEM-encoded format of a host certificate's public key.
+
+        :param key: A :class:`OpenSSL.crypto.PKey <crypto.PKey>` object.
+        """
+        try:
+            assert isinstance(key, crypto.PKey), \
+                "getPublicKey expects type OpenSSL.crypto.PKey for parameter key"
+        except AssertionError as ae:
+            log.err(ae)
+        else:
+            pubkey = crypto.dump_privatekey(crypto.FILETYPE_PEM, key)
+            return pubkey
+
+    def test_handshake(self):
+        """xxx fill me in"""
+
+        def makeConnection(host):
+            """Create a socket to the remote host's IP address, then get the
+            TLS/SSL context method and ciphersuite list. Lastly, initiate a
+            connection to the host.
+
+            :param tuple host: A tuple of the remote host's IP address as a
+                string, and an integer specifying the remote host port, i.e.
+                ('1.1.1.1',443)
+            :raises: :exc:`ConnectionTimeout` if the socket timed out.
+            :returns: A :class:`OpenSSL.SSL.Connection <Connection>`.
+            """
+            addr, port = host
+            sckt = self.buildSocket(addr)
+            context = self.getContext()
+            connection = SSL.Connection(context, sckt)
+            try:
+                connection.connect(host)
+            except socket_timeout as stmo:
+                error = ConnectionTimeout(stmo.message)
+                return failure.Failure(error)
+            else:
+                return connection
+
+        def connectionFailed(connection, host):
+            """Handle errors raised while attempting to create the socket and
+            :class:`OpenSSL.SSL.Connection <Connection>`, and setting the
+            TLS/SSL context.
+
+            :type connection: :exc:Exception
+            :param connection: The exception that was raised in
+                :func:`HandshakeTest.test_handshake.makeConnection
+                <makeConnection>`.
+            :param tuple host: A tuple of the host IP address as a string, and
+                an int specifying the host port, i.e. ('1.1.1.1', 443)
+            :rtype: :exc:Exception
+            :returns: The original exception.
+            """
+            addr, port = host
+
+            if not isinstance(connection, SSL.Connection):
+                if isinstance(connection, IOError):
+                    ## On some *nix distros, /dev/random is 0600 root:root and
+                    ## we get a permissions error when trying to read
+                    if connection.message.find("[Errno 13]"):
+                        raise InsufficientPrivileges(
+                            "%s" % connection.message.split("[Errno 13]", 1)[1])
+                elif isinstance(connection, socket_error):
+                    if connection.message.find("[Errno 101]"):
+                        raise HostUnreachableError(
+                            "Host unreachable: %s:%s" % (addr, port))
+                elif isinstance(connection, Exception):
+                    log.debug("connectionFailed: got Exception:")
+                    log.err("Connection failed with reason: %s"
+                            % connection.message)
+                else:
+                    log.err("Connection failed with reason: %s" % str(connection))
+
+            self.report['host'] = addr
+            self.report['port'] = port
+            self.report['state'] = 'CONNECTION_FAILED'
+
+            return connection
+
+        def connectionSucceeded(connection, host, timeout):
+            """If we have created a connection, set the socket options, and log
+            the connection state and peer name.
+
+            :param connection: A :class:`OpenSSL.SSL.Connection <Connection>`.
+            :param tuple host: A tuple of the remote host's IP address as a
+                string, and an integer specifying the remote host port, i.e.
+                ('1.1.1.1',443)
+            """
+
+            ## xxx TODO to get this to work with a non-blocking socket, see how
+            ##     twisted.internet.tcp.Client handles socket objects.
+            connection.setblocking(1)
+
+            ## Set the timeout on the connection:
+            ##
+            ## We want to set SO_RCVTIMEO and SO_SNDTIMEO, which both are
+            ## defined in the socket option definitions in <sys/socket.h>, and
+            ## which both take as their value, according to socket(7), a
+            ## struct timeval, which is defined in the libc manual:
+            ## https://www.gnu.org/software/libc/manual/html_node/Elapsed-Time.html
+            timeval = struct.pack('ll', int(timeout), 0)
+            connection.setsockopt(socket.SOL_SOCKET, socket.SO_RCVTIMEO, timeval)
+            connection.setsockopt(socket.SOL_SOCKET, socket.SO_SNDTIMEO, timeval)
+
+            ## Set the connection state to client mode:
+            connection.set_connect_state()
+
+            peer_name, peer_port = connection.getpeername()
+            if peer_name:
+                log.msg("Connected to %s" % peer_name)
+            else:
+                log.debug("Couldn't get peer name from connection: %s" % host)
+                log.msg("Connected to %s" % host)
+            log.debug("Connection state: %s " % connection.state_string())
+
+            return connection
+
+        def connectionRenegotiate(connection, host, error_message):
+            """Handle a server-initiated SSL/TLS handshake renegotiation.
+
+            :param connection: A :class:`OpenSSL.SSL.Connection <Connection>`.
+            :param tuple host: A tuple of the remote host's IP address as a
+                string, and an integer specifying the remote host port, i.e.
+                ('1.1.1.1',443)
+            """
+
+            log.msg("Server requested renegotiation from: %s" % host)
+            log.debug("Renegotiation reason: %s" % error_message)
+            log.debug("State: %s" % connection.state_string())
+
+            if connection.renegotiate():
+                log.debug("Renegotiation possible.")
+                log.msg("Retrying handshake with %s..." % host)
+                try:
+                    connection.do_handshake()
+                    while connection.renegotiate_pending():
+                        log.msg("Renegotiation with %s in progress..." % host)
+                        log.debug("State: %s" % connection.state_string())
+                        sleep(1)
+                    else:
+                        log.msg("Renegotiation with %s complete!" % host)
+                except SSL.WantReadError, wre:
+                    connection = handleWantRead(connection)
+                    log.debug("State: %s" % connection.state_string())
+                except SSL.WantWriteError, wwe:
+                    connection = handleWantWrite(connection)
+                    log.debug("State: %s" % connection.state_string())
+            return connection
+
+        def connectionShutdown(connection, host):
+            """Handle shutting down a :class:`OpenSSL.SSL.Connection
+            <Connection>`, including correct handling of halfway shutdown
+            connections.
+
+            Calls to :meth:`OpenSSL.SSL.Connection.shutdown
+            <Connection.shutdown()>` return a boolean value -- if the
+            connection is already shutdown, it returns True, else it returns
+            false. Thus we loop through a block which detects if the connection
+            is an a partial shutdown state and corrects that if that is the
+            case, else it waits for one second, then attempts shutting down the
+            connection again.
+
+            Detection of a partial shutdown state is done through
+            :meth:`OpenSSL.SSL.Connection.get_shutdown
+            <Connection.get_shutdown()>` which queries OpenSSL for a bitvector
+            of the server and client shutdown states. For example, the binary
+            string '0b00' is an open connection, and '0b10' is a partially
+            closed connection that has been shutdown on the serverside.
+
+            :param connection: A :class:`OpenSSL.SSL.Connection <Connection>`.
+            :param tuple host: A tuple of the remote host's IP address as a
+                string, and an integer specifying the remote host port, i.e.
+                ('1.1.1.1',443)
+            """
+
+            peername, peerport = host
+
+            if isinstance(connection, SSL.Connection):
+                log.msg("Closing connection to %s:%d..." % (peername, peerport))
+                while not connection.shutdown():
+                    ## if the connection is halfway shutdown, we have to
+                    ## wait for a ZeroReturnError on connection.recv():
+                    if connection.get_shutdown() in (SSL.SENT_SHUTDOWN,
+                                                     SSL.RECEIVED_SHUTDOWN):
+                        try:
+                            _read_buffer = connection.pending()
+                            connection.recv(_read_buffer)
+                        except SSL.ZeroReturnError, zre: continue
+                    else:
+                        sleep(1)
+                else:
+                    log.msg("Closed connection to %s:%d"
+                            % (peername, peerport))
+            elif isinstance(connection, types.NoneType):
+                log.debug("connectionShutdown: got NoneType for connection")
+                return
+            else:
+                log.debug("connectionShutdown: expected connection, got %r"
+                          % connection.__repr__())
+
+            return connection
+
+        def handleWantRead(connection):
+            """From OpenSSL memory BIO documentation on ssl_read():
+
+                If the underlying BIO is blocking, SSL_read() will only
+                return, once the read operation has been finished or an error
+                occurred, except when a renegotiation take place, in which
+                case a SSL_ERROR_WANT_READ may occur. This behaviour can be
+                controlled with the SSL_MODE_AUTO_RETRY flag of the
+                SSL_CTX_set_mode(3) call.
+
+                If the underlying BIO is non-blocking, SSL_read() will also
+                return when the underlying BIO could not satisfy the needs of
+                SSL_read() to continue the operation. In this case a call to
+                SSL_get_error(3) with the return value of SSL_read() will
+                yield SSL_ERROR_WANT_READ or SSL_ERROR_WANT_WRITE. As at any
+                time a re-negotiation is possible, a call to SSL_read() can
+                also cause write operations!  The calling process then must
+                repeat the call after taking appropriate action to satisfy the
+                needs of SSL_read(). The action depends on the underlying
+                BIO. When using a non-blocking socket, nothing is to be done,
+                but select() can be used to check for the required condition.
+
+            And from the OpenSSL memory BIO documentation on ssl_get_error():
+
+                SSL_ERROR_WANT_READ, SSL_ERROR_WANT_WRITE
+
+                The operation did not complete; the same TLS/SSL I/O function
+                should be called again later. If, by then, the underlying BIO
+                has data available for reading (if the result code is
+                SSL_ERROR_WANT_READ) or allows writing data
+                (SSL_ERROR_WANT_WRITE), then some TLS/SSL protocol progress
+                will take place, i.e. at least part of an TLS/SSL record will
+                be read or written. Note that the retry may again lead to a
+                SSL_ERROR_WANT_READ or SSL_ERROR_WANT_WRITE condition. There
+                is no fixed upper limit for the number of iterations that may
+                be necessary until progress becomes visible at application
+                protocol level.
+
+                For socket BIOs (e.g. when SSL_set_fd() was used), select() or
+                poll() on the underlying socket can be used to find out when
+                the TLS/SSL I/O function should be retried.
+
+                Caveat: Any TLS/SSL I/O function can lead to either of
+                SSL_ERROR_WANT_READ and SSL_ERROR_WANT_WRITE. In particular,
+                SSL_read() or SSL_peek() may want to write data and
+                SSL_write() may want to read data. This is mainly because
+                TLS/SSL handshakes may occur at any time during the protocol
+                (initiated by either the client or the server); SSL_read(),
+                SSL_peek(), and SSL_write() will handle any pending
+                handshakes.
+
+            Also, see http://stackoverflow.com/q/3952104
+            """
+            try:
+                while connection.want_read():
+                    self.state = connection.state_string()
+                    log.debug("Connection to %s HAS want_read" % host)
+                    _read_buffer = connection.pending()
+                    log.debug("Rereading %d bytes..." % _read_buffer)
+                    sleep(1)
+                    rereceived = connection.recv(int(_read_buffer))
+                    log.debug("Received %d bytes" % rereceived)
+                    log.debug("State: %s" % connection.state_string())
+                else:
+                    self.state = connection.state_string()
+                    peername, peerport = connection.getpeername()
+                    log.debug("Connection to %s:%s DOES NOT HAVE want_read"
+                              % (peername, peerport))
+                    log.debug("State: %s" % connection.state_string())
+            except SSL.WantWriteError, wwe:
+                self.state = connection.state_string()
+                log.debug("Got WantWriteError while handling want_read")
+                log.debug("WantWriteError: %s" % wwe.message)
+                log.debug("Switching to handleWantWrite()...")
+                handleWantWrite(connection)
+            return connection
+
+        def handleWantWrite(connection):
+            """See :func:HandshakeTest.test_hanshake.handleWantRead """
+            try:
+                while connection.want_write():
+                    self.state = connection.state_string()
+                    log.debug("Connection to %s HAS want_write" % host)
+                    sleep(1)
+                    resent = connection.send("o\r\n")
+                    log.debug("Sent: %d" % resent)
+                    log.debug("State: %s" % connection.state_string())
+            except SSL.WantReadError, wre:
+                self.state = connection.state_string()
+                log.debug("Got WantReadError while handling want_write")
+                log.debug("WantReadError: %s" % wre.message)
+                log.debug("Switching to handleWantRead()...")
+                handleWantRead(connection)
+            return connection
+
+        def doHandshake(connection):
+            """Attempt a TLS/SSL handshake with the host.
+
+            If, after the first attempt at handshaking, OpenSSL's memory BIO
+            state machine does not report success, then try reading and
+            writing from the connection, and handle any SSL_ERROR_WANT_READ or
+            SSL_ERROR_WANT_WRITE which occurs.
+
+            If multiple want_reads occur, then try renegotiation with the
+            host, and start over. If multiple want_writes occur, then it is
+            possible that the connection has timed out, and move on to the
+            connectionShutdown step.
+
+            :param connection: A :class:`OpenSSL.SSL.Connection <Connection>`.
+            :ivar peername: The host IP address, as reported by
+                :meth:`Connection.getpeername <connection.getpeername()>`.
+            :ivar peerport: The host port, reported by
+                :meth:`Connection.getpeername <connection.getpeername()>`.
+            :ivar int sent: The number of bytes sent to to the remote host.
+            :ivar int received: The number of bytes received from the remote
+                                host.
+            :ivar int _read_buffer: The max bytes that can be read from the
+                                    connection.
+            :returns: The :param:`doHandshake.connection <connection>` with
+                      handshake completed, else the unhandled error that was
+                      raised.
+            """
+            peername, peerport = connection.getpeername()
+
+            try:
+                log.msg("Attempting handshake: %s" % peername)
+                connection.do_handshake()
+            except OpenSSL.SSL.WantReadError as wre:
+                self.state = connection.state_string()
+                log.debug("Handshake state: %s" % self.state)
+                log.debug("doHandshake: WantReadError on first handshake attempt.")
+                connection = handleWantRead(connection)
+            except OpenSSL.SSL.WantWriteError as wwe:
+                self.state = connection.state_string()
+                log.debug("Handshake state: %s" % self.state)
+                log.debug("doHandshake: WantWriteError on first handshake attempt.")
+                connection = handleWantWrite(connection)
+            else:
+                self.state = connection.state_string()
+
+            if self.state == 'SSL negotiation finished successfully':
+                ## jump to handshakeSuccessful and get certchain
+                return connection
+            else:
+                sent = connection.send("o\r\n")
+                self.state = connection.state_string()
+                log.debug("Handshake state: %s" % self.state)
+                log.debug("Transmitted %d bytes" % sent)
+
+                _read_buffer = connection.pending()
+                log.debug("Max bytes in receive buffer: %d" % _read_buffer)
+
+                try:
+                    received = connection.recv(int(_read_buffer))
+                except SSL.WantReadError, wre:
+                    if connection.want_read():
+                        self.state = connection.state_string()
+                        connection = handleWantRead(connection)
+                    else:
+                        ## if we still have an SSL_ERROR_WANT_READ, then try to
+                        ## renegotiate
+                        self.state = connection.state_string()
+                        connection = connectionRenegotiate(connection,
+                                                           connection.getpeername(),
+                                                           wre.message)
+                except SSL.WantWriteError, wwe:
+                    self.state = connection.state_string()
+                    log.debug("Handshake state: %s" % self.state)
+                    if connection.want_write():
+                        connection = handleWantWrite(connection)
+                    else:
+                        raise ConnectionTimeout("Connection to %s:%d timed out."
+                                                % (peername, peerport))
+                else:
+                    log.msg("Received: %s" % received)
+                    self.state = connection.state_string()
+                    log.debug("Handshake state: %s" % self.state)
+
+            return connection
+
+        def handshakeSucceeded(connection):
+            """Get the details from the server certificate, cert chain, and
+            server ciphersuite list, and put them in our report.
+
+            WARNING: do *not* do this:
+            >>> server_cert.get_pubkey()
+                <OpenSSL.crypto.PKey at 0x4985d28>
+            >>> pk = server_cert.get_pubkey()
+            >>> pk.check()
+                Segmentation fault
+
+            :param connection: A :class:`OpenSSL.SSL.Connection <Connection>`.
+            :returns: :param:`handshakeSucceeded.connection <connection>`.
+            """
+            host, port = connection.getpeername()
+            log.msg("Handshake with %s:%d successful!" % (host, port))
+
+            server_cert = self.getPeerCert(connection)
+            server_cert_chain = self.getPeerCert(connection, get_chain=True)
+
+            renegotiations = connection.total_renegotiations()
+            cipher_list    = connection.get_cipher_list()
+            session_key    = connection.master_key()
+            rawcert        = connection.get_peer_certificate()
+            ## xxx TODO this hash needs to be formatted as SHA1, not long
+            cert_subj_hash = rawcert.subject_name_hash()
+            cert_serial    = rawcert.get_serial_number()
+            cert_sig_algo  = rawcert.get_signature_algorithm()
+            cert_subject   = self.getX509Name(rawcert.get_subject(),
+                                              get_components=True)
+            cert_issuer    = self.getX509Name(rawcert.get_issuer(),
+                                              get_components=True)
+            cert_pubkey    = self.getPublicKey(rawcert.get_pubkey())
+
+            self.report['host'] = host
+            self.report['port'] = port
+            self.report['state'] = self.state
+            self.report['renegotiations'] = renegotiations
+            self.report['server_cert'] = server_cert
+            self.report['server_cert_chain'] = \
+                ''.join([cert for cert in server_cert_chain])
+            self.report['server_ciphersuite'] = cipher_list
+            self.report['cert_subject'] = cert_subject
+            self.report['cert_subj_hash'] = cert_subj_hash
+            self.report['cert_issuer'] = cert_issuer
+            self.report['cert_public_key'] = cert_pubkey
+            self.report['cert_serial_no'] = cert_serial
+            self.report['cert_sig_algo'] = cert_sig_algo
+            ## The session's master key is only valid for that session, and
+            ## will allow us to decrypt any packet captures (if they were
+            ## collected). Because we are not requesting URLs, only host:port
+            ## (which would be visible in pcaps anyway, since the FQDN is
+            ## never encrypted) I do not see a way for this to log any user or
+            ## identifying information. Correct me if I'm wrong.
+            self.report['session_key'] = session_key
+
+            log.msg("Server certificate:\n\n%s" % server_cert)
+            log.msg("Server certificate chain:\n\n%s"
+                    % ''.join([cert for cert in server_cert_chain]))
+            log.msg("Negotiated ciphersuite:\n%s"
+                    % '\n\t'.join([cipher for cipher in cipher_list]))
+            log.msg("Certificate subject: %s" % cert_subject)
+            log.msg("Certificate subject hash: %d" % cert_subj_hash)
+            log.msg("Certificate issuer: %s" % cert_issuer)
+            log.msg("Certificate public key:\n\n%s" % cert_pubkey)
+            log.msg("Certificate signature algorithm: %s" % cert_sig_algo)
+            log.msg("Certificate serial number: %s" % cert_serial)
+            log.msg("Total renegotiations: %d" % renegotiations)
+
+            return connection
+
+        def handshakeFailed(connection, host):
+            """Handle a failed handshake attempt and report the failure reason.
+
+            :type connection: :class:`twisted.python.failure.Failure <Failure>`
+                or :exc:Exception
+            :param connection: The failed connection.
+            :param tuple host: A tuple of the remote host's IP address as a
+                string, and an integer specifying the remote host port, i.e.
+                ('1.1.1.1',443)
+            :returns: None
+            """
+            addr, port = host
+            log.msg("Handshake with %s:%d failed!" % host)
+
+            self.report['host'] = host
+            self.report['port'] = port
+
+            if isinstance(connection, Exception) \
+                    or isinstance(connection, ConnectionTimeout):
+                log.msg("Handshake failed with reason: %s" % connection.message)
+                self.report['state'] = connection.message
+            elif isinstance(connection, failure.Failure):
+                log.msg("Handshake failed with reason: Socket %s"
+                        % connection.getErrorMessage())
+                self.report['state'] = connection.getErrorMessage()
+                ctmo = connection.trap(ConnectionTimeout)
+                if ctmo == ConnectionTimeout:
+                    connection.cleanFailure()
+            else:
+                log.msg("Handshake failed with reason: %s" % str(connection))
+                if not 'state' in self.report.keys():
+                    self.report['state'] = str(connection)
+
+            return None
+
+        def deferMakeConnection(host):
+            return threads.deferToThread(makeConnection, self.input)
+
+        if self.host and not self.input:
+            self.input = self.splitInput(self.host)
+        log.msg("Beginning handshake test for %s:%s" % self.input)
+
+        connection = deferMakeConnection(self.input)
+        connection.addCallbacks(connectionSucceeded, connectionFailed,
+                                callbackArgs=[self.input, self.timeout],
+                                errbackArgs=[self.input])
+
+        handshake = defer.Deferred()
+        handshake.addCallback(doHandshake)
+        handshake.addCallbacks(handshakeSucceeded, handshakeFailed,
+                               errbackArgs=[self.input])
+
+        connection.chainDeferred(handshake)
+        connection.addCallbacks(connectionShutdown, defer.passthru,
+                                callbackArgs=[self.input])
+        connection.addBoth(log.exception)
+
+        return connection
diff --git a/ooni/nettests/manipulation/__init__.py b/ooni/nettests/manipulation/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/ooni/nettests/manipulation/captiveportal.py b/ooni/nettests/manipulation/captiveportal.py
new file mode 100644
index 0000000..a0f8c6b
--- /dev/null
+++ b/ooni/nettests/manipulation/captiveportal.py
@@ -0,0 +1,650 @@
+# -*- coding: utf-8 -*-
+# captiveportal
+# *************
+#
+# This test is a collection of tests to detect the presence of a
+# captive portal. Code is taken, in part, from the old ooni-probe,
+# which was written by Jacob Appelbaum and Arturo Filastò.
+#
+# This module performs multiple tests that match specific vendor captive
+# portal tests. This is a basic internet captive portal filter tester written
+# for RECon 2011.
+#
+# Read the following URLs to understand the captive portal detection process
+# for various vendors:
+#
+# http://technet.microsoft.com/en-us/library/cc766017%28WS.10%29.aspx
+# http://blog.superuser.com/2011/05/16/windows-7-network-awareness/
+# http://isc.sans.org/diary.html?storyid=10312
+# http://src.chromium.org/viewvc/chrome?view=rev&revision=74608
+# http://code.google.com/p/chromium-os/issues/detail?3281ttp,
+# http://crbug.com/52489
+# http://crbug.com/71736
+# https://bugzilla.mozilla.org/show_bug.cgi?id=562917
+# https://bugzilla.mozilla.org/show_bug.cgi?id=603505
+# http://lists.w3.org/Archives/Public/ietf-http-wg/2011JanMar/0086.html
+# http://tools.ietf.org/html/draft-nottingham-http-portal-02
+#
+# :authors: Jacob Appelbaum, Arturo Filastò, Isis Lovecruft
+# :license: see LICENSE for more details
+
+import base64
+import os
+import random
+import re
+import string
+import urllib2
+from urlparse import urlparse
+
+from twisted.python import usage
+from twisted.internet import defer, threads
+
+from ooni import nettest
+from ooni.templates import httpt
+from ooni.utils import net
+from ooni.utils import log
+
+try:
+    from dns import resolver
+except ImportError:
+    print "The dnspython module was not found:"
+    print "See https://crate.io/packages/dnspython/"
+    resolver = None
+
+__plugoo__ = "captiveportal"
+__desc__ = "Captive portal detection test"
+
+class UsageOptions(usage.Options):
+    optParameters = [['asset', 'a', None, 'Asset file'],
+                 ['experiment-url', 'e', 'http://google.com/', 'Experiment URL'],
+                 ['user-agent', 'u', random.choice(net.userAgents),
+                  'User agent for HTTP requests']
+                ]
+
+class CaptivePortal(nettest.NetTestCase):
+    """
+    Compares content and status codes of HTTP responses, and attempts
+    to determine if content has been altered.
+    """
+
+    name = "captivep"
+    description = "Captive Portal Test"
+    version = '0.2'
+    author = "Isis Lovecruft"
+    usageOptions = UsageOptions
+
+    def http_fetch(self, url, headers={}):
+        """
+        Parses an HTTP url, fetches it, and returns a urllib2 response
+        object.
+        """
+        url = urlparse(url).geturl()
+        request = urllib2.Request(url, None, headers)
+        #XXX: HTTP Error 302: The HTTP server returned a redirect error that
+        #would lead to an infinite loop.  The last 30x error message was: Found
+        try:
+            response = urllib2.urlopen(request)
+            response_headers = dict(response.headers)
+            return response, response_headers
+        except urllib2.HTTPError, e:
+            log.err("HTTPError: %s" % e)
+            return None, None
+
+    def http_content_match_fuzzy_opt(self, experimental_url, control_result,
+                                     headers=None, fuzzy=False):
+        """
+        Makes an HTTP request on port 80 for experimental_url, then
+        compares the response_content of experimental_url with the
+        control_result. Optionally, if the fuzzy parameter is set to
+        True, the response_content is compared with a regex of the
+        control_result. If the response_content from the
+        experimental_url and the control_result match, returns True
+        along with the HTTP status code and headers; otherwise returns
+        False with the status code and headers.
+        """
+
+        if headers is None:
+            default_ua = self.local_options['user-agent']
+            headers = {'User-Agent': default_ua}
+
+        response, response_headers = self.http_fetch(experimental_url, headers)
+
+        response_content = response.read() if response else None
+        response_code = response.code if response else None
+        if response_content is None:
+            log.err("HTTP connection appears to have failed.")
+            return False, False, False
+
+        if fuzzy:
+            pattern = re.compile(control_result)
+            match = pattern.search(response_content)
+            log.msg("Fuzzy HTTP content comparison for experiment URL")
+            log.msg("'%s'" % experimental_url)
+            if not match:
+                log.msg("does not match!")
+                return False, response_code, response_headers
+            else:
+                log.msg("and the expected control result yielded a match.")
+                return True, response_code, response_headers
+        else:
+            if str(response_content) != str(control_result):
+                log.msg("HTTP content comparison of experiment URL")
+                log.msg("'%s'" % experimental_url)
+                log.msg("and the expected control result do not match.")
+                return False, response_code, response_headers
+            else:
+                return True, response_code, response_headers
+
+    def http_status_code_match(self, experiment_code, control_code):
+        """
+        Compare two HTTP status codes, returns True if they match.
+        """
+        return int(experiment_code) == int(control_code)
+
+    def http_status_code_no_match(self, experiment_code, control_code):
+        """
+        Compare two HTTP status codes, returns True if they do not match.
+        """
+        return int(experiment_code) != int(control_code)
+
+    def dns_resolve(self, hostname, nameserver=None):
+        """
+        Resolves hostname(s) through nameserver to corresponding
+        address(es). hostname may be either a single hostname string,
+        or a list of strings. If nameserver is not given, use local
+        DNS resolver, and if that fails try using 8.8.8.8.
+        """
+        if not resolver:
+            log.msg("dnspython is not installed. "
+                    "Cannot perform DNS Resolve test")
+            return []
+        if isinstance(hostname, str):
+            hostname = [hostname]
+
+        if nameserver is not None:
+            res = resolver.Resolver(configure=False)
+            res.nameservers = [nameserver]
+        else:
+            res = resolver.Resolver()
+
+        response = []
+        answer = None
+
+        for hn in hostname:
+            try:
+                answer = res.query(hn)
+            except resolver.NoNameservers:
+                res.nameservers = ['8.8.8.8']
+                try:
+                    answer = res.query(hn)
+                except resolver.NXDOMAIN:
+                    log.msg("DNS resolution for %s returned NXDOMAIN" % hn)
+                    response.append('NXDOMAIN')
+            except resolver.NXDOMAIN:
+                log.msg("DNS resolution for %s returned NXDOMAIN" % hn)
+                response.append('NXDOMAIN')
+            finally:
+                if not answer:
+                    return response
+                for addr in answer:
+                    response.append(addr.address)
+        return response
+
+    def dns_resolve_match(self, experiment_hostname, control_address):
+        """
+        Resolve experiment_hostname, and check to see that it returns
+        an experiment_address which matches the control_address.  If
+        they match, returns True and experiment_address; otherwise
+        returns False and experiment_address.
+        """
+        experiment_address = self.dns_resolve(experiment_hostname)
+        if not experiment_address:
+            log.debug("dns_resolve() for %s failed" % experiment_hostname)
+            return None, experiment_address
+
+        if len(set(experiment_address) & set([control_address])) > 0:
+            return True, experiment_address
+        else:
+            log.msg("DNS comparison of control '%s' does not" % control_address)
+            log.msg("match experiment response '%s'" % experiment_address)
+            return False, experiment_address
+
+    def get_auth_nameservers(self, hostname):
+        """
+        Many CPs set a nameserver to be used. Let's query that
+        nameserver for the authoritative nameservers of hostname.
+
+        The equivalent of:
+        $ dig +short NS ooni.nu
+        """
+        if not resolver:
+            log.msg("dnspython not installed.")
+            log.msg("Cannot perform test.")
+            return []
+
+        res = resolver.Resolver()
+        answer = res.query(hostname, 'NS')
+        auth_nameservers = []
+        for auth in answer:
+            auth_nameservers.append(auth.to_text())
+        return auth_nameservers
+
+    def hostname_to_0x20(self, hostname):
+        """
+        MaKEs yOur HOsTnaME lOoK LiKE THis.
+
+        For more information, see:
+        D. Dagon, et al. "Increased DNS Forgery Resistance
+        Through 0x20-Bit Encoding". Proc. CCS, 2008.
+        """
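+        # Randomizing the case of each letter adds entropy to the query name:
+        # an off-path attacker forging a response has to guess the exact
+        # capitalization for the answer to be accepted.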
+        hostname_0x20 = ''
+        for char in hostname:
+            l33t = random.choice(['caps', 'nocaps'])
+            if l33t == 'caps':
+                hostname_0x20 += char.capitalize()
+            else:
+                hostname_0x20 += char.lower()
+        return hostname_0x20
+
+    def check_0x20_to_auth_ns(self, hostname, sample_size=None):
+        """
+        Resolve a 0x20 DNS request for hostname over hostname's
+        authoritative nameserver(s), and check to make sure that
+        the capitalization in the 0x20 request matches that of the
+        response. Also, check the serial numbers of the SOA (Start
+        of Authority) records on the authoritative nameservers to
+        make sure that they match.
+
+        If sample_size is given, a random sample equal to that number
+        of authoritative nameservers will be queried; default is 5.
+        """
+        log.msg("")
+        log.msg("Testing random capitalization of DNS queries...")
+        log.msg("Testing that Start of Authority serial numbers match...")
+
+        auth_nameservers = self.get_auth_nameservers(hostname)
+
+        if sample_size is None:
+            sample_size = 5
+        resolved_auth_ns = random.sample(self.dns_resolve(auth_nameservers),
+                                         sample_size)
+
+        querynames = []
+        answernames = []
+        serials = []
+
+        # Even when gevent monkey patching is on, the requests here
+        # are sent without being 0x20'd, so we need to 0x20 them.
+        hostname = self.hostname_to_0x20(hostname)
+
+        for auth_ns in resolved_auth_ns:
+            res = resolver.Resolver(configure=False)
+            res.nameservers = [auth_ns]
+            try:
+                answer = res.query(hostname, 'SOA')
+            except resolver.Timeout:
+                continue
+            querynames.append(answer.qname.to_text())
+            answernames.append(answer.rrset.name.to_text())
+            for soa in answer:
+                serials.append(str(soa.serial))
+
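+        # The authoritative servers should echo our 0x20-randomized name back
+        # byte-for-byte, so the sets of sent and received names should collapse
+        # to a single shared value.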
+        if len(set(querynames).intersection(answernames)) == 1:
+            log.msg("Capitalization in DNS queries and responses match.")
+            name_match = True
+        else:
+            log.msg("The random capitalization '%s' used in" % hostname)
+            log.msg("DNS queries to that hostname's authoritative")
+            log.msg("nameservers does not match the capitalization in")
+            log.msg("the response.")
+            name_match = False
+
+        if len(set(serials)) == 1:
+            log.msg("Start of Authority serial numbers all match.")
+            serial_match = True
+        else:
+            log.msg("Some SOA serial numbers did not match the rest!")
+            serial_match = False
+
+        ret = name_match, serial_match, querynames, answernames, serials
+
+        if name_match and serial_match:
+            log.msg("Your DNS queries do not appear to be tampered with.")
+            return ret
+        elif name_match or serial_match:
+            log.msg("Something is tampering with your DNS queries.")
+            return ret
+        elif not name_match and not serial_match:
+            log.msg("Your DNS queries are definitely being tampered with.")
+            return ret
+
+    def get_random_url_safe_string(self, length):
+        """
+        Returns a random url-safe string of specified length, where
+        0 < length <= 256. The returned string will always start with
+        an alphabetic character.
+        """
+        if (length <= 0):
+            length = 1
+        elif (length > 256):
+            length = 256
+
+        random_ascii = base64.urlsafe_b64encode(os.urandom(int(length)))
+
+        while not random_ascii[:1].isalpha():
+            random_ascii = base64.urlsafe_b64encode(os.urandom(int(length)))
+
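+        # base64 expands every 3 input bytes into 4 output characters, so
+        # keeping the first 3/4 of the encoding yields roughly `length`
+        # url-safe characters.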
+        three_quarters = int((len(random_ascii)) * (3.0/4.0))
+        random_string = random_ascii[:three_quarters]
+        return random_string
+
+    def get_random_hostname(self, length=None):
+        """
+        Returns a random hostname with SLD of specified length. If
+        length is unspecified, length=32 is used.
+
+        These *should* all resolve to NXDOMAIN. If they actually
+        resolve to a box that isn't part of a captive portal, that
+        would be rather interesting.
+        """
+        if length is None:
+            length = 32
+
+        random_sld = self.get_random_url_safe_string(length)
+
+        # if it doesn't start with a letter, chuck it.
+        while not random_sld[:1].isalpha():
+            random_sld = self.get_random_url_safe_string(length)
+
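+        # '.test' and '.invalid' are reserved by RFC 2606 and should never
+        # resolve on an uncensored, portal-free network.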
+        tld_list = ['.com', '.net', '.org', '.info', '.test', '.invalid']
+        random_tld = random.choice(tld_list)
+        random_hostname = random_sld + random_tld
+        return random_hostname
+
+    def compare_random_hostnames(self, hostname_count=None, hostname_length=None):
+        """
+        Get hostname_count number of random hostnames with SLD length
+        of hostname_length, and then attempt DNS resolution. If no
+        arguments are given, default to three hostnames of 32 bytes
+        each. These random hostnames *should* resolve to NXDOMAIN,
+        except in the case where a user is presented with a captive
+        portal and remains unauthenticated, in which case the captive
+        portal may return the address of the authentication page.
+
+        If the cardinality of the intersection of the set of resolved
+        random hostnames and the single element control set
+        (['NXDOMAIN']) is equal to one, then DNS properly resolved.
+
+        Returns True if only NXDOMAINs were returned, otherwise returns
+        False with the relative complement of the control set in the
+        response set.
+        """
+        if hostname_count is None:
+            hostname_count = 3
+
+        log.msg("Generating random hostnames...")
+        log.msg("Resolving DNS for %d random hostnames..." % hostname_count)
+
+        control = ['NXDOMAIN']
+        responses = []
+
+        for x in range(hostname_count):
+            random_hostname = self.get_random_hostname(hostname_length)
+            response_match, response_address = self.dns_resolve_match(random_hostname,
+                                                                      control[0])
+            for address in response_address:
+                if response_match is False:
+                    log.msg("Strangely, DNS resolution of the random hostname")
+                    log.msg("%s actually points to %s"
+                             % (random_hostname, response_address))
+                    responses = responses + [address]
+                else:
+                    responses = responses + [address]
+
+        intersection = set(responses) & set(control)
+        relative_complement = set(responses) - set(control)
+        r = set(responses)
+
+        if (len(intersection) == 1) and (len(r) == 1):
+            log.msg("All %d random hostnames properly resolved to NXDOMAIN."
+                     % hostname_count)
+            return True, relative_complement
+        elif (len(intersection) == 1) and (len(r) > 1):
+            log.msg("Something odd happened. Some random hostnames correctly")
+            log.msg("resolved to NXDOMAIN, but several others resolved to")
+            log.msg("the following addresses: %s" % relative_complement)
+            return False, relative_complement
+        elif (len(intersection) == 0) and (len(r) == 1):
+            log.msg("All random hostnames resolved to the IP address ")
+            log.msg("'%s', which is indicative of a captive portal." % r)
+            return False, relative_complement
+        else:
+            log.debug("Apparently, pigs are flying on your network, 'cause a")
+            log.debug("bunch of hostnames made from 32-byte random strings")
+            log.debug("just magically resolved to a bunch of random addresses.")
+            log.debug("That is definitely highly improbable. In fact, my napkin")
+            log.debug("tells me that the probability of just one of those")
+            log.debug("hostnames resolving to an address is 1.68e-59, making")
+            log.debug("it nearly twice as unlikely as an MD5 hash collision.")
+            log.debug("Either someone is seriously messing with your network,")
+            log.debug("or else you are witnessing the impossible. %s" % r)
+            return False, relative_complement
+
+    def google_dns_cp_test(self):
+        """
+        Google Chrome resolves three 10-byte random hostnames.
+        """
+        subtest = "Google Chrome DNS-based"
+        log.msg("Running the Google Chrome DNS-based captive portal test...")
+
+        gmatch, google_dns_result = self.compare_random_hostnames(3, 10)
+
+        if gmatch:
+            log.msg("Google Chrome DNS-based captive portal test did not")
+            log.msg("detect a captive portal.")
+            return google_dns_result
+        else:
+            log.msg("Google Chrome DNS-based captive portal test believes")
+            log.msg("you are in a captive portal, or else something very")
+            log.msg("odd is happening with your DNS.")
+            return google_dns_result
+
+    def ms_dns_cp_test(self):
+        """
+        Microsoft "phones home" to a server which will always resolve
+        to the same address.
+        """
+        subtest = "Microsoft NCSI DNS-based"
+
+        log.msg("")
+        log.msg("Running the Microsoft NCSI DNS-based captive portal")
+        log.msg("test...")
+
+        msmatch, ms_dns_result = self.dns_resolve_match("dns.msftncsi.com",
+                                                        "131.107.255.255")
+        if msmatch:
+            log.msg("Microsoft NCSI DNS-based captive portal test did not")
+            log.msg("detect a captive portal.")
+            return ms_dns_result
+        else:
+            log.msg("Microsoft NCSI DNS-based captive portal test ")
+            log.msg("believes you are in a captive portal.")
+            return ms_dns_result
+
+    def run_vendor_dns_tests(self):
+        """
+        Run the vendor DNS tests.
+        """
+        report = {}
+        report['google_dns_cp'] = self.google_dns_cp_test()
+        report['ms_dns_cp'] = self.ms_dns_cp_test()
+
+        return report
+
+    def run_vendor_tests(self, *a, **kw):
+        """
+        These are several vendor tests used to detect the presence of
+        a captive portal. Each test compares HTTP status code and
+        content to the control results and has its own User-Agent
+        string, in order to emulate the test as it would occur on the
+        device it was intended for. Vendor tests are defined in the
+        format:
+        [exp_url, ctrl_result, ctrl_code, ua, test_name]
+        """
+
+        vendor_tests = [['http://www.apple.com/library/test/success.html',
+                         'Success',
+                         '200',
+                         'Mozilla/5.0 (iPhone; U; CPU like Mac OS X; en) AppleWebKit/420+ (KHTML, like Gecko) Version/3.0 Mobile/1A543a Safari/419.3',
+                         'Apple HTTP Captive Portal'],
+                        ['http://tools.ietf.org/html/draft-nottingham-http-portal-02',
+                         '428 Network Authentication Required',
+                         '428',
+                         'Mozilla/5.0 (Windows NT 6.1; rv:5.0) Gecko/20100101 Firefox/5.0',
+                         'W3 Captive Portal'],
+                        ['http://www.msftncsi.com/ncsi.txt',
+                         'Microsoft NCSI',
+                         '200',
+                         'Microsoft NCSI',
+                         'MS HTTP Captive Portal',]]
+
+        cm = self.http_content_match_fuzzy_opt
+        sm = self.http_status_code_match
+        snm = self.http_status_code_no_match
+
+        def compare_content(status_func, fuzzy, experiment_url, control_result,
+                            control_code, headers, test_name):
+            log.msg("")
+            log.msg("Running the %s test..." % test_name)
+
+            content_match, experiment_code, experiment_headers = cm(experiment_url,
+                                                                    control_result,
+                                                                    headers, fuzzy)
+            status_match = status_func(experiment_code, control_code)
+
+            if status_match and content_match:
+                log.msg("The %s test was unable to detect" % test_name)
+                log.msg("a captive portal.")
+                return True
+            else:
+                log.msg("The %s test shows that your network" % test_name)
+                log.msg("is filtered.")
+                return False
+
+        result = []
+        for vt in vendor_tests:
+            report = {}
+            report['vt'] = vt
+
+            experiment_url = vt[0]
+            control_result = vt[1]
+            control_code = vt[2]
+            headers = {'User-Agent': vt[3]}
+            test_name = vt[4]
+
+            args = (experiment_url, control_result, control_code, headers, test_name)
+
+            if test_name == "MS HTTP Captive Portal":
+                report['result'] = compare_content(sm, False, *args)
+
+            elif test_name == "Apple HTTP Captive Portal":
+                report['result'] = compare_content(sm, True, *args)
+
+            elif test_name == "W3 Captive Portal":
+                report['result'] = compare_content(snm, True, *args)
+
+            else:
+                log.err("Ooni is trying to run an undefined CP vendor test.")
+            result.append(report)
+        return result
+
+    def control(self, experiment_result, args):
+        """
+        Compares the content and status code of the HTTP response for
+        experiment_url with the control_result and control_code
+        respectively. If the status codes match, but the experimental
+        content and control_result do not match, fuzzy matching is enabled
+        to determine if the control_result is at least included somewhere
+        in the experimental content. Returns True if matches are found,
+        and False if otherwise.
+        """
+        # XXX put this back to being parametrized
+        #experiment_url = self.local_options['experiment-url']
+        experiment_url = 'http://google.com/'
+        control_result = 'XX'
+        control_code = 200
+        ua = self.local_options['user-agent']
+
+        cm = self.http_content_match_fuzzy_opt
+        sm = self.http_status_code_match
+        snm = self.http_status_code_no_match
+
+        log.msg("Running test for '%s'..." % experiment_url)
+        content_match, experiment_code, experiment_headers = cm(experiment_url,
+                                                                control_result)
+        status_match = sm(experiment_code, control_code)
+        if status_match and content_match:
+            log.msg("The test for '%s'" % experiment_url)
+            log.msg("was unable to detect a captive portal.")
+
+            self.report['result'] = True
+
+        elif status_match and not content_match:
+            log.msg("Retrying '%s' with fuzzy match enabled."
+                     % experiment_url)
+            fuzzy_match, experiment_code, experiment_headers = cm(experiment_url,
+                                                                  control_result,
+                                                                  fuzzy=True)
+            if fuzzy_match:
+                self.report['result'] = True
+            else:
+                log.msg("Found modified content on '%s'," % experiment_url)
+                log.msg("which could indicate a captive portal.")
+
+                self.report['result'] = False
+        else:
+            log.msg("The content comparison test for ")
+            log.msg("'%s'" % experiment_url)
+            log.msg("shows that your HTTP traffic is filtered.")
+
+            self.report['result'] = False
+
+    @defer.inlineCallbacks
+    def test_captive_portal(self):
+        """
+        Runs the CaptivePortal(Test).
+
+        CONFIG OPTIONS
+        --------------
+
+        If "do_captive_portal_vendor_tests" is set to "true", then vendor
+        specific captive portal HTTP-based tests will be run.
+
+        If "do_captive_portal_dns_tests" is set to "true", then vendor
+        specific captive portal DNS-based tests will be run.
+
+        If "check_dns_requests" is set to "true", then Ooni-probe will
+        attempt to check that your DNS requests are not being tampered with
+        by a captive portal.
+
+        If "captive_portal" = "yourfilename.txt", then user-specified tests
+        will be run.
+
+        Any combination of the above tests can be run.
+        """
+
+        log.msg("")
+        log.msg("Running vendor tests...")
+        self.report['vendor_tests'] = yield threads.deferToThread(self.run_vendor_tests)
+
+        log.msg("")
+        log.msg("Running vendor DNS-based tests...")
+        self.report['vendor_dns_tests'] = yield threads.deferToThread(self.run_vendor_dns_tests)
+
+        log.msg("")
+        log.msg("Checking that DNS requests are not being tampered with...")
+        self.report['check0x20'] = yield threads.deferToThread(self.check_0x20_to_auth_ns, 'ooni.nu')
+
+        log.msg("")
+        log.msg("Captive portal test finished!")
+
diff --git a/ooni/nettests/manipulation/daphne.py b/ooni/nettests/manipulation/daphne.py
new file mode 100644
index 0000000..09279fa
--- /dev/null
+++ b/ooni/nettests/manipulation/daphne.py
@@ -0,0 +1,119 @@
+# -*- encoding: utf-8 -*-
+from twisted.python import usage
+from twisted.internet import protocol, endpoints, reactor
+
+from ooni import nettest
+from ooni.kit import daphn3
+from ooni.utils import log
+
+class Daphn3ClientProtocol(daphn3.Daphn3Protocol):
+    def nextStep(self):
+        log.debug("Moving on to next step in the state walk")
+        self.current_data_received = 0
+        if self.current_step >= (len(self.steps) - 1):
+            log.msg("Reached the end of the state machine")
+            log.msg("Censorship fingerprint bisected!")
+            step_idx, mutation_idx = self.factory.mutation
+            log.msg("step_idx: %s | mutation_idx: %s" % (step_idx, mutation_idx))
+            #self.transport.loseConnection()
+            if self.report:
+                self.report['mutation_idx'] = mutation_idx
+                self.report['step_idx'] = step_idx
+            self.d.callback(None)
+            return
+        else:
+            self.current_step += 1
+        if self._current_step_role() == self.role:
+            # We need to send more data because we are again responsible for
+            # doing so.
+            self.sendPayload()
+
+
+class Daphn3ClientFactory(protocol.ClientFactory):
+    protocol = daphn3.Daphn3Protocol
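+    # mutation holds [step_idx, mutation_idx]: which step of the recorded
+    # exchange and which byte within that step is currently being mutated.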
+    mutation = [0,0]
+    steps = None
+
+    def buildProtocol(self, addr):
+        p = self.protocol()
+        p.steps = self.steps
+        p.factory = self
+        return p
+
+    def startedConnecting(self, connector):
+        log.msg("Started connecting %s" % connector)
+
+    def clientConnectionFailed(self, reason, connector):
+        log.err("We failed connecting to the OONIB")
+        log.err("Cannot perform test. Perhaps it got blocked?")
+        log.err("Please report this to tor-assistants@xxxxxxxxxxxxxx")
+
+    def clientConnectionLost(self, reason, connector):
+        log.err("Daphn3 client connection lost")
+        print reason
+
+class daphn3Args(usage.Options):
+    optParameters = [
+                     ['host', 'h', '127.0.0.1', 'Target Hostname'],
+                     ['port', 'p', 57003, 'Target port number']]
+
+    optFlags = [['pcap', 'c', 'Specify that the input file is a pcap file'],
+                ['yaml', 'y', 'Specify that the input file is a YAML file (default)']]
+
+class daphn3Test(nettest.NetTestCase):
+
+    name = "Daphn3"
+    usageOptions = daphn3Args
+    inputFile = ['file', 'f', None, 
+            'Specify the pcap or YAML file to be used as input to the test']
+
+    #requiredOptions = ['file']
+
+    steps = None
+
+    def inputProcessor(self, filename):
+        """
+        step_idx is the step in the packet exchange
+        ex.
+        [.X.] are packets sent by a client or a server
+
+            client:  [.1.]        [.3.] [.4.]
+            server:         [.2.]             [.5.]
+
+        mutation_idx is the index of the byte within the packet at
+        step_idx that is to be mutated
+
+        """
+        if self.localOptions['pcap']:
+            daphn3Steps = daphn3.read_pcap(filename)
+        else:
+            daphn3Steps = daphn3.read_yaml(filename)
+        log.debug("Loaded these steps %s" % daphn3Steps)
+        yield daphn3Steps
+
+    def test_daphn3(self):
+        host = self.localOptions['host']
+        port = int(self.localOptions['port'])
+
+        def failure(failure):
+            log.msg("Failed to connect")
+            self.report['censored'] = True
+            self.report['mutation'] = 0
+            raise Exception("Error in connection, perhaps the backend is censored")
+
+        def success(protocol):
+            log.msg("Successfully connected")
+            protocol.sendPayload()
+            return protocol.d
+
+        log.msg("Connecting to %s:%s" % (host, port))
+        endpoint = endpoints.TCP4ClientEndpoint(reactor, host, port)
+        daphn3_factory = Daphn3ClientFactory()
+        daphn3_factory.steps = self.input
+        daphn3_factory.report = self.report
+        d = endpoint.connect(daphn3_factory)
+        d.addErrback(failure)
+        d.addCallback(success)
+        return d
+
diff --git a/ooni/nettests/manipulation/dnsspoof.py b/ooni/nettests/manipulation/dnsspoof.py
new file mode 100644
index 0000000..c9120a4
--- /dev/null
+++ b/ooni/nettests/manipulation/dnsspoof.py
@@ -0,0 +1,70 @@
+from twisted.internet import defer
+from twisted.python import usage
+
+from scapy.all import IP, UDP, DNS, DNSQR
+
+from ooni.templates import scapyt
+from ooni.utils import log
+
+class UsageOptions(usage.Options):
+    optParameters = [['resolver', 'r', None,
+                    'Specify the resolver that should be used for DNS queries (ip:port)'],
+                    ['hostname', 'h', None,
+                        'Specify the hostname of a censored site'],
+                    ['backend', 'b', '8.8.8.8:53',
+                        'Specify the IP address of a good DNS resolver (ip:port)']
+                    ]
+
+
+class DNSSpoof(scapyt.ScapyTest):
+    name = "DNS Spoof"
+    timeout = 2
+
+    usageOptions = UsageOptions
+
+    requiredTestHelpers = {'backend': 'dns'}
+    requiredOptions = ['hostname', 'resolver']
+
+    def setUp(self):
+        self.resolverAddr, self.resolverPort = self.localOptions['resolver'].split(':')
+        self.resolverPort = int(self.resolverPort)
+
+        self.controlResolverAddr, self.controlResolverPort = self.localOptions['backend'].split(':')
+        self.controlResolverPort = int(self.controlResolverPort)
+
+        self.hostname = self.localOptions['hostname']
+
+    def postProcessor(self, report):
+        """
+        This is not tested, but the concept is that if the two responses
+        match up, then spoofing is occurring.
+        """
+        try:
+            test_answer = report['test_a_lookup']['answered_packets'][0][1]
+            control_answer = report['test_control_a_lookup']['answered_packets'][0][1]
+        except IndexError:
+            self.report['spoofing'] = 'no_answer'
+            return
+
+        if test_answer[UDP] == control_answer[UDP]:
+            self.report['spoofing'] = True
+        else:
+            self.report['spoofing'] = False
+        return
+
+    @defer.inlineCallbacks
+    def test_a_lookup(self):
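+        # Build a single UDP DNS A query for the (possibly censored) hostname,
+        # send it to the resolver under test, and record the first answer.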
+        question = IP(dst=self.resolverAddr)/UDP()/DNS(rd=1,
+                qd=DNSQR(qtype="A", qclass="IN", qname=self.hostname))
+        log.msg("Performing query to %s with %s:%s" % (self.hostname, self.resolverAddr, self.resolverPort))
+        yield self.sr1(question)
+
+    @defer.inlineCallbacks
+    def test_control_a_lookup(self):
+        question = IP(dst=self.controlResolverAddr)/UDP()/DNS(rd=1,
+                qd=DNSQR(qtype="A", qclass="IN", qname=self.hostname))
+        log.msg("Performing query to %s with %s:%s" % (self.hostname,
+            self.controlResolverAddr, self.controlResolverPort))
+        yield self.sr1(question)
+
+
diff --git a/ooni/nettests/manipulation/http_header_field_manipulation.py b/ooni/nettests/manipulation/http_header_field_manipulation.py
new file mode 100644
index 0000000..3423442
--- /dev/null
+++ b/ooni/nettests/manipulation/http_header_field_manipulation.py
@@ -0,0 +1,190 @@
+# -*- encoding: utf-8 -*-
+#
+# :authors: Arturo Filastò
+# :licence: see LICENSE
+
+import random
+import json
+import yaml
+
+from twisted.python import usage
+
+from ooni.utils import log, net, randomStr
+from ooni.templates import httpt
+from ooni.utils.txagentwithsocks import TrueHeaders
+
+def random_capitalization(string):
+    output = ""
+    original_string = string
+    string = string.swapcase()
+    for i in range(len(string)):
+        if random.randint(0, 1):
+            output += string[i].swapcase()
+        else:
+            output += string[i]
+    if original_string == output:
+        return random_capitalization(output)
+    else:
+        return output
+
+class UsageOptions(usage.Options):
+    optParameters = [
+            ['backend', 'b', 'http://127.0.0.1:57001', 
+                'URL of the backend to use for sending the requests'],
+            ['headers', 'h', None,
+                'Specify a yaml formatted file from which to read the request headers to send']
+            ]
+
+class HTTPHeaderFieldManipulation(httpt.HTTPTest):
+    """
+    It performs HTTP requests with request headers that vary in capitalization
+    towards a backend. If the headers reported by the server differ from
+    the ones we sent, then we have detected tampering.
+    """
+    name = "HTTP Header Field Manipulation"
+    author = "Arturo Filastò"
+    version = "0.1.3"
+
+    randomizeUA = False
+    usageOptions = UsageOptions
+
+    requiredTestHelpers = {'backend': 'http-return-json-headers'}
+    requiredOptions = ['backend']
+
+    def get_headers(self):
+        headers = {}
+        if self.localOptions['headers']:
+            try:
+                f = open(self.localOptions['headers'])
+            except IOError:
+                raise Exception("Specified input file does not exist")
+            content = ''.join(f.readlines())
+            f.close()
+            headers = yaml.safe_load(content)
+            return headers
+        else:
+            # XXX generate these from a random choice taken from whatheaders.com
+            # http://s3.amazonaws.com/data.whatheaders.com/whatheaders-latest.xml.zip
+            headers = {"User-Agent": [random.choice(net.userAgents)],
+                "Accept": ["text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8"],
+                "Accept-Encoding": ["gzip,deflate,sdch"],
+                "Accept-Language": ["en-US,en;q=0.8"],
+                "Accept-Charset": ["ISO-8859-1,utf-8;q=0.7,*;q=0.3"],
+                "Host": [randomStr(15)+'.com']
+            }
+            return headers
+
+    def get_random_caps_headers(self):
+        headers = {}
+        normal_headers = self.get_headers()
+        for k, v in normal_headers.items():
+            new_key = random_capitalization(k)
+            headers[new_key] = v
+        return headers
+
+    def processInputs(self):
+        if self.localOptions['backend']:
+            self.url = self.localOptions['backend']
+        else:
+            raise Exception("No backend specified")
+
+    def processResponseBody(self, data):
+        self.check_for_tampering(data)
+
+    def check_for_tampering(self, data):
+        """
+        Here we do checks to verify if the request we made has been tampered
+        with. We have the following categories of tampering:
+
+        *  **total** when the response is not a json object and therefore we were not
+        able to reach the ooniprobe test backend
+
+        *  **request_line_capitalization** when the HTTP Request line (e.g. GET /
+        HTTP/1.1) does not match the capitalization we set.
+
+        *  **header_field_number** when the number of headers we sent does not match
+        with the ones the backend received
+
+        *  **header_name_capitalization** when the header field names do not match
+        those that we sent.
+
+        *  **header_field_value** when the header field value does not match with the
+        one we transmitted.
+        """
+        log.msg("Checking for tampering on %s" % self.url)
+
+        self.report['tampering'] = {
+            'total': False,
+            'request_line_capitalization': False,
+            'header_name_capitalization': False,
+            'header_field_value': False,
+            'header_field_number': False
+        }
+        try:
+            response = json.loads(data)
+        except ValueError:
+            self.report['tampering']['total'] = True
+            return
+
+        request_request_line = "%s / HTTP/1.1" % self.request_method
+
+        try:
+            response_request_line = response['request_line']
+            response_headers_dict = response['headers_dict']
+        except KeyError:
+            self.report['tampering']['total'] = True
+            return
+
+        if request_request_line != response_request_line:
+            self.report['tampering']['request_line_capitalization'] = True
+
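+        # TrueHeaders preserves the exact capitalization of header names, so
+        # diffing what we sent against what the backend echoed back (ignoring
+        # the hop-by-hop Connection header) exposes header-name tampering.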
+        request_headers = TrueHeaders(self.request_headers)
+        diff = request_headers.getDiff(TrueHeaders(response_headers_dict),
+                ignore=['Connection'])
+        if diff:
+            self.report['tampering']['header_name_capitalization'] = True
+        else:
+            self.report['tampering']['header_name_capitalization'] = False
+        self.report['tampering']['header_name_diff'] = list(diff)
+        log.msg("    total: %(total)s" % self.report['tampering'])
+        log.msg("    request_line_capitalization: %(request_line_capitalization)s" % self.report['tampering'])
+        log.msg("    header_name_capitalization: %(header_name_capitalization)s" % self.report['tampering'])
+        log.msg("    header_field_value: %(header_field_value)s" % self.report['tampering'])
+        log.msg("    header_field_number: %(header_field_number)s" % self.report['tampering'])
+
+    def test_get(self):
+        self.request_method = "GET"
+        self.request_headers = self.get_random_caps_headers()
+        return self.doRequest(self.url, self.request_method,
+                headers=self.request_headers)
+
+    def test_get_random_capitalization(self):
+        self.request_method = random_capitalization("GET")
+        self.request_headers = self.get_random_caps_headers()
+        return self.doRequest(self.url, self.request_method,
+                headers=self.request_headers)
+
+    def test_post(self):
+        self.request_method = "POST"
+        self.request_headers = self.get_headers()
+        return self.doRequest(self.url, self.request_method,
+                headers=self.request_headers)
+
+    def test_post_random_capitalization(self):
+        self.request_method = random_capitalization("POST")
+        self.request_headers = self.get_random_caps_headers()
+        return self.doRequest(self.url, self.request_method,
+                headers=self.request_headers)
+
+    def test_put(self):
+        self.request_method = "PUT"
+        self.request_headers = self.get_headers()
+        return self.doRequest(self.url, self.request_method,
+                headers=self.request_headers)
+
+    def test_put_random_capitalization(self):
+        self.request_method = random_capitalization("PUT")
+        self.request_headers = self.get_random_caps_headers()
+        return self.doRequest(self.url, self.request_method,
+                headers=self.request_headers)
+
diff --git a/ooni/nettests/manipulation/http_host.py b/ooni/nettests/manipulation/http_host.py
new file mode 100644
index 0000000..2ec517c
--- /dev/null
+++ b/ooni/nettests/manipulation/http_host.py
@@ -0,0 +1,152 @@
+# -*- encoding: utf-8 -*-
+#
+# HTTP Host Test
+# **************
+#
+# :authors: Arturo Filastò
+# :licence: see LICENSE
+
+import json
+from twisted.python import usage
+
+from ooni.utils import randomStr, randomSTR
+
+from ooni.utils import log
+from ooni.templates import httpt
+
+class UsageOptions(usage.Options):
+    optParameters = [['backend', 'b', 'http://127.0.0.1:57001',
+                      'URL of the test backend to use. Should be \
+                              listening on port 80 and be a \
+                              HTTPReturnJSONHeadersHelper'],
+                     ['content', 'c', None, 'The file to read \
+                            from containing the content of a block page']]
+
+class HTTPHost(httpt.HTTPTest):
+    """
+    This test is aimed at detecting the presence of a transparent HTTP proxy
+    and enumerating the sites that are being censored by it.
+
+    It places inside of the Host header field the hostname of the site that is
+    to be tested for censorship and then determines if the probe is behind a
+    transparent HTTP proxy (because the response from the backend server does
+    not match) and if the site is censored, by checking if the page that it
+    got back matches the input block page.
+    """
+    name = "HTTP Host"
+    author = "Arturo Filastò"
+    version = "0.2.3"
+
+    randomizeUA = False
+    usageOptions = UsageOptions
+
+    inputFile = ['file', 'f', None,
+            'List of hostnames to test for censorship']
+
+    requiredTestHelpers = {'backend': 'http-return-json-headers'}
+    requiredOptions = ['backend']
+
+    def test_filtering_prepend_newline_to_method(self):
+        headers = {}
+        headers["Host"] = [self.input]
+        return self.doRequest(self.localOptions['backend'], method="\nGET",
+                headers=headers)
+
+    def test_filtering_add_tab_to_host(self):
+        headers = {}
+        headers["Host"] = [self.input + '\t']
+        return self.doRequest(self.localOptions['backend'],
+                headers=headers)
+
+    def test_filtering_of_subdomain(self):
+        headers = {}
+        headers["Host"] = [randomStr(10) + '.' + self.input]
+        return self.doRequest(self.localOptions['backend'],
+                headers=headers)
+
+    def test_filtering_via_fuzzy_matching(self):
+        headers = {}
+        headers["Host"] = [randomStr(10) + self.input + randomStr(10)]
+        return self.doRequest(self.localOptions['backend'],
+                headers=headers)
+
+    def test_send_host_header(self):
+        """
+        Stuffs the HTTP Host header field with the site to be tested for
+        censorship and does an HTTP request of this kind to our backend.
+
+        We randomize the HTTP User Agent headers.
+        """
+        headers = {}
+        headers["Host"] = [self.input]
+        return self.doRequest(self.localOptions['backend'],
+                headers=headers)
+
+    def check_for_censorship(self, body):
+        """
+        If we have specified what a censorship page looks like here we will
+        check if the page we are looking at matches it.
+
+        XXX this is not tested, though it is basically what was used to detect
+        censorship in the Palestine case.
+        """
+        if self.localOptions['content']:
+            self.report['censored'] = True
+            censorship_page = open(self.localOptions['content'])
+            response_page = iter(body.split("\n"))
+
+            for censorship_line in censorship_page.xreadlines():
+                response_line = response_page.next()
+                if response_line != censorship_line:
+                    self.report['censored'] = False
+                    break
+
+            censorship_page.close()
+        else:
+            self.report['censored'] = None
+
+    def processResponseBody(self, body):
+        """
+        XXX this is to be filled in with either a domclass-based classifier or
+        with a rule that will allow us to detect that the body of the result is
+        that of a censored site.
+        """
+        # If we don't see a JSON object we know that something is wrong for
+        # sure
+        if not body.startswith("{"):
+            log.msg("This does not appear to be JSON")
+            self.report['transparent_http_proxy'] = True
+            self.check_for_censorship(body)
+            return
+        try:
+            content = json.loads(body)
+        except:
+            log.msg("The json does not parse, this is not what we expected")
+            self.report['transparent_http_proxy'] = True
+            self.check_for_censorship(body)
+            return
+
+        # We base the determination of the presence of a transparent HTTP
+        # proxy on the basis of the response containing the json that is to be
+        # returned by an HTTP Request Test Helper
+        if 'request_headers' in content and \
+                'request_line' in content and \
+                'headers_dict' in content:
+            log.msg("Found the keys I expected in %s" % content)
+            self.report['transparent_http_proxy'] = False
+            self.report['censored'] = False
+        else:
+            log.msg("Did not find the keys I expected in %s" % content)
+            self.report['transparent_http_proxy'] = True
+            self.check_for_censorship(body)
+
+    def inputProcessor(self, filename=None):
+        """
+        This inputProcessor extracts domain names from URLs
+        """
+        if filename:
+            fp = open(filename)
+            for x in fp.readlines():
+                yield x.strip().split('//')[-1].split('/')[0]
+            fp.close()
+        else: pass
diff --git a/ooni/nettests/manipulation/http_invalid_request_line.py b/ooni/nettests/manipulation/http_invalid_request_line.py
new file mode 100644
index 0000000..64dbcac
--- /dev/null
+++ b/ooni/nettests/manipulation/http_invalid_request_line.py
@@ -0,0 +1,108 @@
+# -*- encoding: utf-8 -*-
+from twisted.python import usage
+
+from ooni.utils import log
+from ooni.utils import randomStr, randomSTR
+from ooni.templates import tcpt
+
+class UsageOptions(usage.Options):
+    optParameters = [['backend', 'b', '127.0.0.1',
+                        'The OONI backend that runs a TCP echo server'],
+                    ['backendport', 'p', 80, 'Specify the port that the TCP echo server is running (should only be set for debugging)']]
+
+class HTTPInvalidRequestLine(tcpt.TCPTest):
+    """
+    The goal of this test is to do some very basic and not very noisy fuzzing
+    on the HTTP request line. We generate a series of requests that are not
+    valid HTTP requests.
+
+    Unless stated otherwise, 'Xx'*N refers to N*2 random upper- or lowercase
+    ASCII letters or numbers ('XxXx' will be 4).
+    """
+    name = "HTTP Invalid Request Line"
+    version = "0.2"
+    authors = "Arturo Filastò"
+
+    usageOptions = UsageOptions
+
+    requiredTestHelpers = {'backend': 'tcp-echo'}
+    requiredOptions = ['backend']
+
+    def setUp(self):
+        self.port = int(self.localOptions['backendport'])
+        self.address = self.localOptions['backend']
+
+    def check_for_manipulation(self, response, payload):
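+        # The tcp-echo helper should return our bytes unmodified; any
+        # difference means something on the path rewrote or mangled the
+        # request.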
+        log.debug("Checking if %s == %s" % (response, payload))
+        if response != payload:
+            self.report['tampering'] = True
+        else:
+            self.report['tampering'] = False
+
+    def test_random_invalid_method(self):
+        """
+        We test sending data to a TCP echo server listening on port 80; if what
+        we get back is not what we sent, then there is tampering going on.
+        This is, for example, what squid will return when performing such a
+        request:
+
+            HTTP/1.0 400 Bad Request
+            Server: squid/2.6.STABLE21
+            Date: Sat, 23 Jul 2011 02:22:44 GMT
+            Content-Type: text/html
+            Content-Length: 1178
+            Expires: Sat, 23 Jul 2011 02:22:44 GMT
+            X-Squid-Error: ERR_INVALID_REQ 0
+            X-Cache: MISS from cache_server
+            X-Cache-Lookup: NONE from cache_server:3128
+            Via: 1.0 cache_server:3128 (squid/2.6.STABLE21)
+            Proxy-Connection: close
+
+        """
+        payload = randomSTR(4) + " / HTTP/1.1\n\r"
+
+        d = self.sendPayload(payload)
+        d.addCallback(self.check_for_manipulation, payload)
+        return d
+
+    def test_random_invalid_field_count(self):
+        """
+        This generates a request that looks like this:
+
+        XxXxX XxXxX XxXxX XxXxX
+
+        This may trigger some bugs in the HTTP parsers of transparent HTTP
+        proxies.
+        """
+        payload = ' '.join(randomStr(5) for x in range(4))
+        payload += "\n\r"
+
+        d = self.sendPayload(payload)
+        d.addCallback(self.check_for_manipulation, payload)
+        return d
+
+    def test_random_big_request_method(self):
+        """
+        This generates a request that looks like this:
+
+        Xx*512 / HTTP/1.1
+        """
+        payload = randomStr(1024) + ' / HTTP/1.1\n\r'
+
+        d = self.sendPayload(payload)
+        d.addCallback(self.check_for_manipulation, payload)
+        return d
+
+    def test_random_invalid_version_number(self):
+        """
+        This generates a request that looks like this:
+
+        GET / HTTP/XxX
+        """
+        payload = 'GET / HTTP/' + randomStr(3)
+        payload += '\n\r'
+
+        d = self.sendPayload(payload)
+        d.addCallback(self.check_for_manipulation, payload)
+        return d
+
diff --git a/ooni/nettests/manipulation/traceroute.py b/ooni/nettests/manipulation/traceroute.py
new file mode 100644
index 0000000..2db1826
--- /dev/null
+++ b/ooni/nettests/manipulation/traceroute.py
@@ -0,0 +1,144 @@
+# -*- encoding: utf-8 -*-
+#
+# :authors: Arturo Filastò
+# :licence: see LICENSE
+
+from twisted.python import usage
+from twisted.internet import defer
+
+from ooni.templates import scapyt
+
+from scapy.all import *
+
+from ooni.utils import log
+
+class UsageOptions(usage.Options):
+    optParameters = [
+                    ['backend', 'b', '8.8.8.8', 'Test backend to use'],
+                    ['timeout', 't', 5, 'The timeout for the traceroute test'],
+                    ['maxttl', 'm', 30, 'The maximum value of ttl to set on packets'],
+                    ['srcport', 'p', None, 'Set the source port to a specific value (only applies to TCP and UDP)']
+                    ]
+
+class TracerouteTest(scapyt.BaseScapyTest):
+    name = "Multi Protocol Traceroute Test"
+    author = "Arturo Filastò"
+    version = "0.2"
+
+    requiredTestHelpers = {'backend': 'traceroute'}
+    usageOptions = UsageOptions
+    dst_ports = [0, 22, 23, 53, 80, 123, 443, 8080, 65535]
+
+    def setUp(self):
+        def get_sport(protocol):
+            if self.localOptions['srcport']:
+                return int(self.localOptions['srcport'])
+            else:
+                return random.randint(1024, 65535)
+
+        self.get_sport = get_sport
+
+    def max_ttl_and_timeout(self):
+        max_ttl = int(self.localOptions['maxttl'])
+        timeout = int(self.localOptions['timeout'])
+        self.report['max_ttl'] = max_ttl
+        self.report['timeout'] = timeout
+        return max_ttl, timeout
+
+
+    def postProcessor(self, report):
+        tcp_hops = report['test_tcp_traceroute']
+        udp_hops = report['test_udp_traceroute']
+        icmp_hops = report['test_icmp_traceroute']
+
+
+    def test_tcp_traceroute(self):
+        """
+        Does a traceroute to the destination by sending TCP SYN packets
+        with TTLs from 1 until max_ttl.
+        """
+        def finished(packets, port):
+            log.debug("Finished running TCP traceroute test on port %s" % port)
+            answered, unanswered = packets
+            self.report['hops_'+str(port)] = []
+            for snd, rcv in answered:
+                try:
+                    sport = snd[TCP].sport
+                except IndexError:
+                    log.err("Source port for this traceroute was not found. This is probably a bug")
+                    sport = -1
+
+                report = {'ttl': snd.ttl,
+                        'address': rcv.src,
+                        'rtt': rcv.time - snd.time,
+                        'sport': sport
+                }
+                log.debug("%s: %s" % (port, report))
+                self.report['hops_'+str(port)].append(report)
+
+        dl = []
+        max_ttl, timeout = self.max_ttl_and_timeout()
+        for port in self.dst_ports:
+            packets = IP(dst=self.localOptions['backend'],
+                    ttl=(1,max_ttl),id=RandShort())/TCP(flags=0x2, dport=port,
+                            sport=self.get_sport('tcp'))
+
+            d = self.sr(packets, timeout=timeout)
+            d.addCallback(finished, port)
+            dl.append(d)
+        return defer.DeferredList(dl)
+
+    def test_udp_traceroute(self):
+        """
+        Does a traceroute to the destination by sending UDP packets with empty
+        payloads with TTLs from 1 until max_ttl.
+        """
+        def finished(packets, port):
+            log.debug("Finished running UDP traceroute test on port %s" % port)
+            answered, unanswered = packets
+            self.report['hops_'+str(port)] = []
+            for snd, rcv in answered:
+                report = {'ttl': snd.ttl,
+                        'address': rcv.src,
+                        'rtt': rcv.time - snd.time,
+                        'sport': snd[UDP].sport
+                }
+                log.debug("%s: %s" % (port, report))
+                self.report['hops_'+str(port)].append(report)
+        dl = []
+        max_ttl, timeout = self.max_ttl_and_timeout()
+        for port in self.dst_ports:
+            packets = IP(dst=self.localOptions['backend'],
+                    ttl=(1,max_ttl),id=RandShort())/UDP(dport=port,
+                            sport=self.get_sport('udp'))
+
+            d = self.sr(packets, timeout=timeout)
+            d.addCallback(finished, port)
+            dl.append(d)
+        return defer.DeferredList(dl)
+
+    def test_icmp_traceroute(self):
+        """
+        Does a traceroute to the destination by sending ICMP echo request
+        packets with TTLs from 1 until max_ttl.
+        """
+        def finished(packets):
+            log.debug("Finished running ICMP traceroute test")
+            answered, unanswered = packets
+            self.report['hops'] = []
+            for snd, rcv in answered:
+                report = {'ttl': snd.ttl,
+                        'address': rcv.src,
+                        'rtt': rcv.time - snd.time
+                }
+                log.debug("%s" % (report))
+                self.report['hops'].append(report)
+        dl = []
+        max_ttl, timeout = self.max_ttl_and_timeout()
+        packets = IP(dst=self.localOptions['backend'],
+                    ttl=(1,max_ttl), id=RandShort())/ICMP()
+
+        d = self.sr(packets, timeout=timeout)
+        d.addCallback(finished)
+        return d
+
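All three tests above follow the same pattern: build one probe per TTL with
ttl=(1, max_ttl), hand the series to scapy's sr(), and record the TTL,
responder address and round-trip time of every answered probe. A minimal
blocking sketch of that pattern in plain scapy, outside the twisted/scapyt
template (the 8.8.8.8 target mirrors the option default; sending raw packets
needs root):

    import random

    from scapy.all import IP, TCP, RandShort, sr

    def tcp_traceroute(dst, dport=80, max_ttl=30, timeout=5):
        # One SYN per TTL value; sr() returns (answered, unanswered).
        probes = IP(dst=dst, ttl=(1, max_ttl), id=RandShort()) / \
                 TCP(flags=0x2, dport=dport,
                     sport=random.randint(1024, 65535))
        answered, unanswered = sr(probes, timeout=timeout)
        hops = [{'ttl': snd.ttl,
                 'address': rcv.src,
                 'rtt': rcv.time - snd.time,
                 'sport': snd[TCP].sport} for snd, rcv in answered]
        return sorted(hops, key=lambda hop: hop['ttl'])

    # e.g. tcp_traceroute('8.8.8.8', dport=443)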
diff --git a/ooni/nettests/scanning/__init__.py b/ooni/nettests/scanning/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/ooni/nettests/scanning/http_url_list.py b/ooni/nettests/scanning/http_url_list.py
new file mode 100644
index 0000000..0accaae
--- /dev/null
+++ b/ooni/nettests/scanning/http_url_list.py
@@ -0,0 +1,98 @@
+# -*- encoding: utf-8 -*-
+#
+# :authors: Arturo Filastò
+# :licence: see LICENSE
+
+from twisted.internet import defer
+from twisted.python import usage
+from ooni.templates import httpt
+from ooni.utils import log
+
+class UsageOptions(usage.Options):
+    optParameters = [['content', 'c', None,
+                        'A file containing the content of a block page'],
+                     ['url', 'u', None, 'Specify a single URL to test.']
+                    ]
+
+class HTTPURLList(httpt.HTTPTest):
+    """
+    Performs GET, POST and PUT requests to a list of URLs specified as
+    input and checks if the page that we get back as a result matches that
+    of a block page given as input.
+
+    If no block page is given as input to the test it will simply collect the
+    responses to the HTTP requests and write them to a report file.
+    """
+    name = "HTTP URL List"
+    author = "Arturo Filastò"
+    version = "0.1.3"
+
+    usageOptions = UsageOptions
+
+    inputFile = ['file', 'f', None, 
+            'List of URLS to perform GET and POST requests to']
+
+    def setUp(self):
+        """
+        Check for inputs.
+        """
+        if self.input:
+            self.url = self.input
+        elif self.localOptions['url']:
+            self.url = self.localOptions['url']
+        else:
+            raise Exception("No input specified")
+
+    def check_for_content_censorship(self, body):
+        """
+        If we have specified what a censorship page looks like here we will
+        check if the page we are looking at matches it.
+
+        XXX this is not tested, though it is basically what was used to detect
+        censorship in the Palestine case.
+        """
+        self.report['censored'] = True
+
+        censorship_page = open(self.localOptions['content']).xreadlines()
+        response_page = iter(body.split("\n"))
+
+        # We first align the two pages on the first HTML tag (a line
+        # starting with <). This lets us give as input to this test
+        # something taken straight from the output of curl -kis http://the_page/
+        for line in censorship_page:
+            if line.strip().startswith("<"):
+                break
+        for line in response_page:
+            if line.strip().startswith("<"):
+                break
+
+        for censorship_line in censorship_page:
+            try:
+                response_line = response_page.next()
+            except StopIteration:
+                # The censored page and the response we got do not match in
+                # length.
+                self.report['censored'] = False
+                break
+            censorship_line = censorship_line.replace("\n", "")
+            if response_line != censorship_line:
+                self.report['censored'] = False
+
+        censorship_page.close()
+
+    def processResponseBody(self, body):
+        if self.localOptions['content']:
+            log.msg("Checking for censorship in response body")
+            self.check_for_content_censorship(body)
+
+    def test_get(self):
+        return self.doRequest(self.url, method="GET")
+
+    def test_post(self):
+        return self.doRequest(self.url, method="POST")
+
+    def test_put(self):
+        return self.doRequest(self.url, method="PUT")
+
+
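The censorship check above is a line-by-line comparison of the fetched body
against a known block page, after skipping both down to their first HTML tag
(so a capture that still carries HTTP headers, e.g. from curl -kis, lines
up). A small standalone sketch of the same comparison, with both pages
passed in as strings (the function name is illustrative only):

    def matches_block_page(body, block_page):
        def align(text):
            # Skip forward to (and past) the first line that opens an HTML tag.
            lines = iter(text.split('\n'))
            for line in lines:
                if line.strip().startswith('<'):
                    break
            return lines

        response = align(body)
        blocked = align(block_page)

        for expected in blocked:
            try:
                got = next(response)
            except StopIteration:
                # The response is shorter than the block page: no match.
                return False
            if got != expected:
                return False
        return True

    # e.g. matches_block_page(body, open('blockpage.html').read())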
diff --git a/ooni/nettests/third_party/Makefile b/ooni/nettests/third_party/Makefile
new file mode 100644
index 0000000..16adfe0
--- /dev/null
+++ b/ooni/nettests/third_party/Makefile
@@ -0,0 +1,3 @@
+fetch:
+	wget http://netalyzr.icsi.berkeley.edu/NetalyzrCLI.jar
+	chmod +x NetalyzrCLI.jar
diff --git a/ooni/nettests/third_party/README b/ooni/nettests/third_party/README
new file mode 100644
index 0000000..d9e435f
--- /dev/null
+++ b/ooni/nettests/third_party/README
@@ -0,0 +1,14 @@
+There is no license for NetalyzrCLI.jar; it is included purely for ease of
+use.
+
+We currently support interfacing with the ICSI Netalyzr system by wrapping
+the NetalyzrCLI.jar client. It was downloaded on August 5th, 2011 from the
+following URL:
+  http://netalyzr.icsi.berkeley.edu/NetalyzrCLI.jar
+
+More information about the client is available on its CLI web page:
+  http://netalyzr.icsi.berkeley.edu/cli.html
+
+After looking at NetalyzrCLI.jar, I discovered that '-d' runs it in a
+debugging mode that is quite useful for understanding their testing
+framework as it runs.
diff --git a/ooni/nettests/third_party/__init__.py b/ooni/nettests/third_party/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/ooni/nettests/third_party/netalyzr.py b/ooni/nettests/third_party/netalyzr.py
new file mode 100644
index 0000000..9b21831
--- /dev/null
+++ b/ooni/nettests/third_party/netalyzr.py
@@ -0,0 +1,58 @@
+# -*- encoding: utf-8 -*-
+#
+# This is a wrapper around the Netalyzr Java command-line client
+#
+# :authors: Jacob Appelbaum <jacob@xxxxxxxxxxxxx>
+#           Arturo "hellais" Filastò <art@xxxxxxxxx>
+# :licence: see LICENSE
+
+from ooni import nettest
+from ooni.utils import log
+import time
+import os
+from twisted.internet import reactor, threads, defer
+
+class NetalyzrWrapperTest(nettest.NetTestCase):
+    name = "NetalyzrWrapper"
+
+    def setUp(self):
+        cwd = os.path.dirname(os.path.abspath(__file__))
+
+        # XXX set the output directory to something more uniform
+        outputdir = os.path.join(cwd, '..', '..')
+
+        program_path = os.path.join(cwd, 'NetalyzrCLI.jar')
+        program = "java -jar %s -d" % program_path
+
+        test_token = time.asctime(time.gmtime()).replace(" ", "_").strip()
+
+        self.output_file = os.path.join(outputdir,
+                "NetalyzrCLI_" + test_token + ".out")
+        self.output_file = self.output_file.strip()
+        self.run_me = program + " >> " + self.output_file + " 2>&1"
+
+    def blocking_call(self):
+        try:
+            threads.blockingCallFromThread(reactor, os.system, self.run_me)
+        except Exception:
+            log.debug("Netalyzr had an error, please see the log file: %s" % self.output_file)
+        finally:
+            self.clean_up()
+
+    def clean_up(self):
+        self.report['netalyzr_report'] = self.output_file
+        log.debug("finished running NetalyzrWrapper")
+        log.debug("Please check %s for Netalyzr output" % self.output_file)
+
+    def test_run_netalyzr(self):
+        """
+        This test simply wraps Netalyzr and runs it from the command line.
+        """
+        log.msg("Running NetalyzrWrapper (this will take some time, be patient)")
+        log.debug("with command '%s'" % self.run_me)
+        # XXX we probably want to use a processprotocol here to obtain the
+        # stdout from Netalyzr. This would allow us to visualize progress
+        # (currently there is no progress because the stdout of os.system is
+        # trapped by twisted) and to include the link to the netalyzr report
+        # directly in the OONI report, perhaps even downloading it.
+        reactor.callInThread(self.blocking_call)
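The XXX above asks for a ProcessProtocol in place of os.system, so that
Netalyzr's stdout can be streamed (for progress) and folded into the OONI
report. A rough sketch of what that could look like with Twisted's
ProcessProtocol; the jar path and the '-d' flag come from setUp above, and
everything else is illustrative rather than the wrapper's current behaviour:

    from twisted.internet import defer, protocol, reactor

    class NetalyzrProtocol(protocol.ProcessProtocol):
        def __init__(self):
            self.done = defer.Deferred()
            self.output = []

        def outReceived(self, data):
            # stdout arrives incrementally; progress could be logged here.
            self.output.append(data)

        def errReceived(self, data):
            self.output.append(data)

        def processEnded(self, reason):
            # Hand back everything the JVM printed once it exits.
            self.done.callback(''.join(self.output))

    def run_netalyzr(jar_path):
        proto = NetalyzrProtocol()
        reactor.spawnProcess(proto, 'java',
                             ['java', '-jar', jar_path, '-d'],
                             env=None)  # env=None inherits the parent environment
        return proto.done

    # e.g. run_netalyzr('NetalyzrCLI.jar').addCallback(lambda out: log.msg(out[:200]))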


