[Author Prev][Author Next][Thread Prev][Thread Next][Author Index][Thread Index]

[tor-commits] [ooni-probe/develop] Move nettests into data/ subdirectory



commit af5862bf7443eaba008b1de134c17991bf0c4373
Author: Arturo Filastò <art@xxxxxxxxx>
Date:   Tue Apr 23 16:58:36 2013 +0200

    Move nettests into data/ subdirectory
---
 data/nettests/blocking/__init__.py                 |    1 +
 data/nettests/blocking/dnsconsistency.py           |  173 ++++++
 data/nettests/blocking/http_requests.py            |  130 ++++
 data/nettests/blocking/tcpconnect.py               |   45 ++
 data/nettests/examples/example_dns_http.py         |   11 +
 data/nettests/examples/example_dnst.py             |   13 +
 data/nettests/examples/example_http_checksum.py    |   27 +
 data/nettests/examples/example_httpt.py            |   36 ++
 data/nettests/examples/example_myip.py             |   21 +
 data/nettests/examples/example_scapyt.py           |   29 +
 data/nettests/examples/example_scapyt_yield.py     |   25 +
 data/nettests/examples/example_simple.py           |    8 +
 data/nettests/examples/example_tcpt.py             |   21 +
 .../experimental/bridge_reachability/bridget.py    |  462 ++++++++++++++
 .../experimental/bridge_reachability/echo.py       |  132 ++++
 data/nettests/experimental/chinatrigger.py         |  108 ++++
 data/nettests/experimental/dns_injection.py        |   63 ++
 data/nettests/experimental/domclass_collector.py   |   33 +
 .../experimental/http_filtering_bypassing.py       |   84 +++
 .../experimental/http_keyword_filtering.py         |   45 ++
 data/nettests/experimental/http_trix.py            |   47 ++
 .../experimental/http_uk_mobile_networks.py        |   85 +++
 data/nettests/experimental/keyword_filtering.py    |   52 ++
 data/nettests/experimental/parasitictraceroute.py  |  129 ++++
 data/nettests/experimental/squid.py                |  117 ++++
 data/nettests/manipulation/captiveportal.py        |  650 ++++++++++++++++++++
 data/nettests/manipulation/daphne.py               |  119 ++++
 data/nettests/manipulation/dnsspoof.py             |   69 +++
 .../manipulation/http_header_field_manipulation.py |  189 ++++++
 data/nettests/manipulation/http_host.py            |  141 +++++
 .../manipulation/http_invalid_request_line.py      |  106 ++++
 data/nettests/manipulation/traceroute.py           |  143 +++++
 data/nettests/scanning/http_url_list.py            |   98 +++
 data/nettests/third_party/Makefile                 |    3 +
 data/nettests/third_party/README                   |   14 +
 data/nettests/third_party/netalyzr.py              |   58 ++
 nettests/blocking/__init__.py                      |    1 -
 nettests/blocking/dnsconsistency.py                |  173 ------
 nettests/blocking/http_requests.py                 |  130 ----
 nettests/blocking/tcpconnect.py                    |   45 --
 nettests/examples/example_dns_http.py              |   11 -
 nettests/examples/example_dnst.py                  |   13 -
 nettests/examples/example_http_checksum.py         |   27 -
 nettests/examples/example_httpt.py                 |   36 --
 nettests/examples/example_myip.py                  |   21 -
 nettests/examples/example_scapyt.py                |   29 -
 nettests/examples/example_scapyt_yield.py          |   25 -
 nettests/examples/example_simple.py                |    8 -
 nettests/examples/example_tcpt.py                  |   21 -
 .../experimental/bridge_reachability/bridget.py    |  462 --------------
 nettests/experimental/bridge_reachability/echo.py  |  132 ----
 nettests/experimental/chinatrigger.py              |  108 ----
 nettests/experimental/dns_injection.py             |   63 --
 nettests/experimental/domclass_collector.py        |   33 -
 nettests/experimental/http_filtering_bypassing.py  |   84 ---
 nettests/experimental/http_keyword_filtering.py    |   45 --
 nettests/experimental/http_trix.py                 |   47 --
 nettests/experimental/http_uk_mobile_networks.py   |   85 ---
 nettests/experimental/keyword_filtering.py         |   52 --
 nettests/experimental/parasitictraceroute.py       |  129 ----
 nettests/experimental/squid.py                     |  117 ----
 nettests/manipulation/captiveportal.py             |  650 --------------------
 nettests/manipulation/daphne.py                    |  119 ----
 nettests/manipulation/dnsspoof.py                  |   69 ---
 .../manipulation/http_header_field_manipulation.py |  189 ------
 nettests/manipulation/http_host.py                 |  141 -----
 nettests/manipulation/http_invalid_request_line.py |  106 ----
 nettests/manipulation/traceroute.py                |  143 -----
 nettests/scanning/http_url_list.py                 |   98 ---
 nettests/third_party/Makefile                      |    3 -
 nettests/third_party/README                        |   14 -
 nettests/third_party/netalyzr.py                   |   58 --
 72 files changed, 3487 insertions(+), 3487 deletions(-)

diff --git a/data/nettests/__init__.py b/data/nettests/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/data/nettests/blocking/__init__.py b/data/nettests/blocking/__init__.py
new file mode 100644
index 0000000..8b13789
--- /dev/null
+++ b/data/nettests/blocking/__init__.py
@@ -0,0 +1 @@
+
diff --git a/data/nettests/blocking/dnsconsistency.py b/data/nettests/blocking/dnsconsistency.py
new file mode 100644
index 0000000..7b6e7b9
--- /dev/null
+++ b/data/nettests/blocking/dnsconsistency.py
@@ -0,0 +1,173 @@
+# -*- encoding: utf-8 -*-
+#
+#  dnsconsistency
+#  **************
+#
+#  The test reports censorship if the cardinality of the intersection of
+#  the query result set from the control server and the query result set
+#  from the experimental server is zero, which is to say, if the two sets
+#  have no matching results whatsoever.
+#
+#  NOTE: This test frequently results in false positives due to GeoIP-based
+#  load balancing on major global sites such as google, facebook, and
+#  youtube, etc.
+#
+# :authors: Arturo Filastò, Isis Lovecruft
+# :licence: see LICENSE
+
+import pdb
+
+from twisted.python import usage
+from twisted.internet import defer
+
+from ooni.templates import dnst
+
+from ooni import nettest
+from ooni.utils import log
+
+class UsageOptions(usage.Options):
+    optParameters = [['backend', 'b', '8.8.8.8:53',
+                        'The OONI backend that runs the DNS resolver'],
+                     ['testresolvers', 'T', None,
+                        'File containing list of DNS resolvers to test against'],
+                     ['testresolver', 't', None,
+                         'Specify a single test resolver to use for testing']
+                    ]
+
+class DNSConsistencyTest(dnst.DNSTest):
+
+    name = "DNS Consistency"
+    description = "DNS censorship detection test"
+    version = "0.5"
+    authors = "Arturo Filastò, Isis Lovecruft"
+    requirements = None
+
+    inputFile = ['file', 'f', None,
+                 'Input file of list of hostnames to attempt to resolve']
+
+    usageOptions = UsageOptions
+    requiredOptions = ['backend', 'file']
+
+    def setUp(self):
+        if (not self.localOptions['testresolvers'] and \
+                not self.localOptions['testresolver']):
+            raise usage.UsageError("You did not specify a testresolver")
+
+        elif self.localOptions['testresolvers']:
+            test_resolvers_file = self.localOptions['testresolvers']
+
+        elif self.localOptions['testresolver']:
+            self.test_resolvers = [self.localOptions['testresolver']]
+
+        try:
+            with open(test_resolvers_file) as f:
+                self.test_resolvers = [x.split('#')[0].strip() for x in f.readlines()]
+                self.report['test_resolvers'] = self.test_resolvers
+            f.close()
+
+        except IOError, e:
+            log.exception(e)
+            raise usage.UsageError("Invalid test resolvers file")
+
+        except NameError:
+            log.debug("No test resolver file configured")
+
+        dns_ip, dns_port = self.localOptions['backend'].split(':')
+        self.control_dns_server = (dns_ip, int(dns_port))
+
+        self.report['control_resolver'] = self.control_dns_server
+
+    @defer.inlineCallbacks
+    def test_a_lookup(self):
+        """
+        We perform an A lookup on the DNS test servers for the domains to be
+        tested and an A lookup on the known good DNS server.
+
+        We then compare the results from test_resolvers and that from
+        control_resolver and see if they match up.
+        If they match up then no censorship is happening (tampering: false).
+
+        If they do not we do a reverse lookup (PTR) on the test_resolvers and
+        the control resolver for every IP address we got back and check to see
+        if any one of them matches the control ones.
+
+        If they do then we take note of the fact that censorship is probably not
+        happening (tampering: reverse-match).
+
+        If they do not match then censorship is probably going on (tampering:
+        true).
+        """
+        log.msg("Doing the test lookups on %s" % self.input)
+        list_of_ds = []
+        hostname = self.input
+
+        self.report['tampering'] = {}
+
+        control_answers = yield self.performALookup(hostname, self.control_dns_server)
+        if not control_answers:
+                log.err("Got no response from control DNS server %s," \
+                        " perhaps the DNS resolver is down?" % self.control_dns_server[0])
+                self.report['tampering'][self.control_dns_server] = 'no_answer'
+                return
+
+        for test_resolver in self.test_resolvers:
+            log.msg("Testing resolver: %s" % test_resolver)
+            test_dns_server = (test_resolver, 53)
+
+            try:
+                experiment_answers = yield self.performALookup(hostname, test_dns_server)
+            except Exception, e:
+                log.err("Problem performing the DNS lookup")
+                log.exception(e)
+                self.report['tampering'][test_resolver] = 'dns_lookup_error'
+                continue
+
+            if not experiment_answers:
+                log.err("Got no response, perhaps the DNS resolver is down?")
+                self.report['tampering'][test_resolver] = 'no_answer'
+                continue
+            else:
+                log.debug("Got the following A lookup answers %s from %s" % (experiment_answers, test_resolver))
+
+            def lookup_details():
+                """
+                A closure useful for printing test details.
+                """
+                log.msg("test resolver: %s" % test_resolver)
+                log.msg("experiment answers: %s" % experiment_answers)
+                log.msg("control answers: %s" % control_answers)
+
+            log.debug("Comparing %s with %s" % (experiment_answers, control_answers))
+            if set(experiment_answers) & set(control_answers):
+                lookup_details()
+                log.msg("tampering: false")
+                self.report['tampering'][test_resolver] = False
+            else:
+                log.msg("Trying to do reverse lookup")
+
+                experiment_reverse = yield self.performPTRLookup(experiment_answers[0], test_dns_server)
+                control_reverse = yield self.performPTRLookup(control_answers[0], self.control_dns_server)
+
+                if experiment_reverse == control_reverse:
+                    log.msg("Further testing has eliminated false positives")
+                    lookup_details()
+                    log.msg("tampering: reverse_match")
+                    self.report['tampering'][test_resolver] = 'reverse_match'
+                else:
+                    log.msg("Reverse lookups do not match")
+                    lookup_details()
+                    log.msg("tampering: true")
+                    self.report['tampering'][test_resolver] = True
+
+    def inputProcessor(self, filename=None):
+        """
+        This inputProcessor extracts domain names from urls
+        """
+        log.debug("Running dnsconsistency default processor")
+        if filename:
+            fp = open(filename)
+            for x in fp.readlines():
+                yield x.strip().split('//')[-1].split('/')[0]
+            fp.close()
+        else:
+            pass
diff --git a/data/nettests/blocking/http_requests.py b/data/nettests/blocking/http_requests.py
new file mode 100644
index 0000000..9208739
--- /dev/null
+++ b/data/nettests/blocking/http_requests.py
@@ -0,0 +1,130 @@
+# -*- encoding: utf-8 -*-
+#
+# :authors: Arturo Filastò
+# :licence: see LICENSE
+
+import random
+from twisted.internet import defer
+from twisted.python import usage
+
+from ooni.utils import log
+from ooni.utils.net import userAgents
+from ooni.templates import httpt
+from ooni.errors import failureToString, handleAllFailures
+
+class UsageOptions(usage.Options):
+    optParameters = [
+                     ['url', 'u', None, 'Specify a single URL to test.'],
+                     ['factor', 'f', 0.8, 'What factor should be used for triggering censorship (0.8 == 80%)']
+                    ]
+
+class HTTPRequestsTest(httpt.HTTPTest):
+    """
+    Performs two GET requests to the set of sites to be tested for
+    censorship, one over a known good control channel (Tor), the other over the
+    test network.
+
+    We check to see if the response headers match and if the response body
+    lengths match.
+    """
+    name = "HTTP Requests Test"
+    author = "Arturo Filastò"
+    version = "0.2.3"
+
+    usageOptions = UsageOptions
+
+    inputFile = ['file', 'f', None,
+            'List of URLS to perform GET and POST requests to']
+
+    # These values are used for determining censorship based on response body
+    # lengths
+    control_body_length = None
+    experiment_body_length = None
+
+    def setUp(self):
+        """
+        Check for inputs.
+        """
+        if self.input:
+            self.url = self.input
+        elif self.localOptions['url']:
+            self.url = self.localOptions['url']
+        else:
+            raise Exception("No input specified")
+
+        self.factor = self.localOptions['factor']
+        self.report['control_failure'] = None
+        self.report['experiment_failure'] = None
+
+    def compare_body_lengths(self, body_length_a, body_length_b):
+
+        if body_length_b == 0 and body_length_a != 0:
+            rel = float(body_length_b)/float(body_length_a)
+        elif body_length_b == 0 and body_length_a == 0:
+            rel = float(1)
+        else:
+            rel = float(body_length_a)/float(body_length_b)
+
+        if rel > 1:
+            rel = 1/rel
+
+        self.report['body_proportion'] = rel
+        self.report['factor'] = self.factor
+        if rel > self.factor:
+            log.msg("The two body lengths appear to match")
+            log.msg("censorship is probably not happening")
+            self.report['body_length_match'] = True
+        else:
+            log.msg("The two body lengths appear to not match")
+            log.msg("censorship could be happening")
+            self.report['body_length_match'] = False
+
+    def compare_headers(self, headers_a, headers_b):
+        diff = headers_a.getDiff(headers_b)
+        if diff:
+            log.msg("Headers appear to *not* match")
+            self.report['headers_diff'] = diff
+            self.report['headers_match'] = False
+        else:
+            log.msg("Headers appear to match")
+            self.report['headers_diff'] = diff
+            self.report['headers_match'] = True
+
+    def test_get(self):
+        def callback(res):
+            experiment, control = res
+            experiment_succeeded, experiment_result = experiment
+            control_succeeded, control_result = control
+
+            if control_succeeded and experiment_succeeded:
+                self.compare_body_lengths(len(experiment_result.body),
+                        len(control_result.body))
+
+                self.compare_headers(control_result.headers,
+                        experiment_result.headers)
+
+            if not control_succeeded:
+                self.report['control_failure'] = failureToString(control_result)
+
+            if not experiment_succeeded:
+                self.report['experiment_failure'] = failureToString(experiment_result)
+
+        headers = {'User-Agent': [random.choice(userAgents)]}
+
+        l = []
+        log.msg("Performing GET request to %s" % self.url)
+        experiment_request = self.doRequest(self.url, method="GET",
+                headers=headers)
+
+        log.msg("Performing GET request to %s via Tor" % self.url)
+        control_request = self.doRequest(self.url, method="GET",
+                use_tor=True, headers=headers)
+
+        l.append(experiment_request)
+        l.append(control_request)
+
+        dl = defer.DeferredList(l, consumeErrors=True)
+        dl.addCallback(callback)
+
+        return dl
+
diff --git a/data/nettests/blocking/tcpconnect.py b/data/nettests/blocking/tcpconnect.py
new file mode 100644
index 0000000..3b22427
--- /dev/null
+++ b/data/nettests/blocking/tcpconnect.py
@@ -0,0 +1,45 @@
+# -*- encoding: utf-8 -*-
+from twisted.internet.protocol import Factory, Protocol
+from twisted.internet.endpoints import TCP4ClientEndpoint
+
+from twisted.internet.error import ConnectionRefusedError
+from twisted.internet.error import TCPTimedOutError, TimeoutError
+
+from ooni import nettest
+from ooni.errors import handleAllFailures
+from ooni.utils import log
+
+class TCPFactory(Factory):
+    def buildProtocol(self, addr):
+        return Protocol()
+
+class TCPConnectTest(nettest.NetTestCase):
+    name = "TCP Connect"
+    author = "Arturo Filastò"
+    version = "0.1"
+    inputFile = ['file', 'f', None,
+            'File containing the IP:PORT combinations to be tested, one per line']
+
+    requiredOptions = ['file']
+    def test_connect(self):
+        """
+        This test performs a TCP connection to the remote host on the specified port.
+        the report will contain the string 'success' if the test has
+        succeeded, or the reason for the failure if it has failed.
+        """
+        host, port = self.input.split(":")
+        def connectionSuccess(protocol):
+            protocol.transport.loseConnection()
+            log.debug("Got a connection to %s" % self.input)
+            self.report["connection"] = 'success'
+
+        def connectionFailed(failure):
+            self.report['connection'] = handleAllFailures(failure)
+
+        from twisted.internet import reactor
+        point = TCP4ClientEndpoint(reactor, host, int(port))
+        d = point.connect(TCPFactory())
+        d.addCallback(connectionSuccess)
+        d.addErrback(connectionFailed)
+        return d
+
diff --git a/data/nettests/examples/__init__.py b/data/nettests/examples/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/data/nettests/examples/example_dns_http.py b/data/nettests/examples/example_dns_http.py
new file mode 100644
index 0000000..9b76775
--- /dev/null
+++ b/data/nettests/examples/example_dns_http.py
@@ -0,0 +1,11 @@
+from twisted.internet import defer
+from ooni.templates import httpt, dnst
+
+class TestDNSandHTTP(httpt.HTTPTest, dnst.DNSTest):
+
+    @defer.inlineCallbacks
+    def test_http_and_dns(self):
+        yield self.doRequest('http://torproject.org')
+        yield self.performALookup('torproject.org', ('8.8.8.8', 53))
+
+
diff --git a/data/nettests/examples/example_dnst.py b/data/nettests/examples/example_dnst.py
new file mode 100644
index 0000000..6905637
--- /dev/null
+++ b/data/nettests/examples/example_dnst.py
@@ -0,0 +1,13 @@
+from ooni.templates import dnst
+
+class ExampleDNSTest(dnst.DNSTest):
+    inputFile = ['file', 'f', None, 'foobar']
+
+    def test_a_lookup(self):
+        def gotResult(result):
+            # Result is an array containing all the A record lookup results
+            print result
+
+        d = self.performALookup('torproject.org', ('8.8.8.8', 53))
+        d.addCallback(gotResult)
+        return d
diff --git a/data/nettests/examples/example_http_checksum.py b/data/nettests/examples/example_http_checksum.py
new file mode 100644
index 0000000..9226b52
--- /dev/null
+++ b/data/nettests/examples/example_http_checksum.py
@@ -0,0 +1,27 @@
+# -*- encoding: utf-8 -*-
+#
+# :authors: Aaron Gibson
+# :licence: see LICENSE
+
+from ooni.utils import log
+from ooni.templates import httpt
+from hashlib import sha256
+
+class SHA256HTTPBodyTest(httpt.HTTPTest):
+    name = "ChecksumHTTPBodyTest"
+    author = "Aaron Gibson"
+    version = 0.1
+
+    inputFile = ['file', 'f', None, 
+            'List of URLS to perform GET requests to']
+
+    def test_http(self):
+        if self.input:
+            url = self.input
+            return self.doRequest(url)
+        else:
+            raise Exception("No input specified")
+
+    def processResponseBody(self, body):
+        body_sha256sum = sha256(body).digest()
+        self.report['checksum'] = body_sha256sum
diff --git a/data/nettests/examples/example_httpt.py b/data/nettests/examples/example_httpt.py
new file mode 100644
index 0000000..e76aed4
--- /dev/null
+++ b/data/nettests/examples/example_httpt.py
@@ -0,0 +1,36 @@
+# -*- encoding: utf-8 -*-
+#
+# :authors: Arturo Filastò
+# :licence: see LICENSE
+
+from ooni.utils import log
+from ooni.templates import httpt
+
+class ExampleHTTP(httpt.HTTPTest):
+    name = "Example HTTP Test"
+    author = "Arturo Filastò"
+    version = 0.1
+
+    inputs = ['http://google.com/', 'http://wikileaks.org/',
+              'http://torproject.org/']
+
+    def test_http(self):
+        if self.input:
+            url = self.input
+            return self.doRequest(url)
+        else:
+            raise Exception("No input specified")
+
+    def processResponseBody(self, body):
+        # XXX here shall go your logic
+        #     for processing the body
+        if 'blocked' in body:
+            self.report['censored'] = True
+        else:
+            self.report['censored'] = False
+
+    def processResponseHeaders(self, headers):
+        # XXX place in here all the logic for handling the processing of HTTP
+        #     Headers.
+        pass
+
diff --git a/data/nettests/examples/example_myip.py b/data/nettests/examples/example_myip.py
new file mode 100644
index 0000000..70cf773
--- /dev/null
+++ b/data/nettests/examples/example_myip.py
@@ -0,0 +1,21 @@
+# -*- encoding: utf-8 -*-
+#
+# :authors: Arturo Filastò
+# :licence: see LICENSE
+
+from ooni.templates import httpt
+class MyIP(httpt.HTTPTest):
+    inputs = ['https://check.torproject.org']
+
+    def test_lookup(self):
+        return self.doRequest(self.input)
+
+    def processResponseBody(self, body):
+        import re
+        regexp = "Your IP address appears to be: <b>(.+?)<\/b>"
+        match = re.search(regexp, body)
+        try:
+            self.report['myip'] = match.group(1)
+        except:
+            self.report['myip'] = None
+
diff --git a/data/nettests/examples/example_scapyt.py b/data/nettests/examples/example_scapyt.py
new file mode 100644
index 0000000..ba04072
--- /dev/null
+++ b/data/nettests/examples/example_scapyt.py
@@ -0,0 +1,29 @@
+# -*- encoding: utf-8 -*-
+#
+# :licence: see LICENSE
+
+from twisted.python import usage
+
+from scapy.all import IP, ICMP
+
+from ooni.templates import scapyt
+
+class UsageOptions(usage.Options):
+    optParameters = [['target', 't', '8.8.8.8', "Specify the target to ping"]]
+    
+class ExampleICMPPingScapy(scapyt.BaseScapyTest):
+    name = "Example ICMP Ping Test"
+
+    usageOptions = UsageOptions
+
+    def test_icmp_ping(self):
+        def finished(packets):
+            print packets
+            answered, unanswered = packets
+            for snd, rcv in answered:
+                rcv.show()
+
+        packets = IP(dst=self.localOptions['target'])/ICMP()
+        d = self.sr(packets)
+        d.addCallback(finished)
+        return d
diff --git a/data/nettests/examples/example_scapyt_yield.py b/data/nettests/examples/example_scapyt_yield.py
new file mode 100644
index 0000000..311b5aa
--- /dev/null
+++ b/data/nettests/examples/example_scapyt_yield.py
@@ -0,0 +1,25 @@
+# -*- encoding: utf-8 -*-
+#
+# :licence: see LICENSE
+
+from twisted.python import usage
+from twisted.internet import defer
+
+from scapy.all import IP, ICMP
+
+from ooni.templates import scapyt
+
+class UsageOptions(usage.Options):
+    optParameters = [['target', 't', '8.8.8.8', "Specify the target to ping"]]
+
+class ExampleICMPPingScapyYield(scapyt.BaseScapyTest):
+    name = "Example ICMP Ping Test"
+
+    usageOptions = UsageOptions
+
+    @defer.inlineCallbacks
+    def test_icmp_ping(self):
+        packets = IP(dst=self.localOptions['target'])/ICMP()
+        answered, unanswered = yield self.sr(packets)
+        for snd, rcv in answered:
+            rcv.show()
diff --git a/data/nettests/examples/example_simple.py b/data/nettests/examples/example_simple.py
new file mode 100644
index 0000000..24de5a6
--- /dev/null
+++ b/data/nettests/examples/example_simple.py
@@ -0,0 +1,8 @@
+from twisted.internet import defer
+from ooni import nettest
+
+class MyIP(nettest.NetTestCase):
+    def test_simple(self):
+        self.report['foobar'] = 'antani'
+        return defer.succeed(42)
+
diff --git a/data/nettests/examples/example_tcpt.py b/data/nettests/examples/example_tcpt.py
new file mode 100644
index 0000000..613160b
--- /dev/null
+++ b/data/nettests/examples/example_tcpt.py
@@ -0,0 +1,21 @@
+
+from twisted.internet.error import ConnectionRefusedError
+from ooni.utils import log
+from ooni.templates import tcpt
+
+class ExampleTCPT(tcpt.TCPTest):
+    def test_hello_world(self):
+        def got_response(response):
+            print "Got this data %s" % response
+
+        def connection_failed(failure):
+            failure.trap(ConnectionRefusedError)
+            print "Connection Refused"
+
+        self.address = "127.0.0.1"
+        self.port = 57002
+        payload = "Hello World!\n\r"
+        d = self.sendPayload(payload)
+        d.addErrback(connection_failed)
+        d.addCallback(got_response)
+        return d
diff --git a/data/nettests/experimental/__init__.py b/data/nettests/experimental/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/data/nettests/experimental/bridge_reachability/bridget.py b/data/nettests/experimental/bridge_reachability/bridget.py
new file mode 100644
index 0000000..acf3dff
--- /dev/null
+++ b/data/nettests/experimental/bridge_reachability/bridget.py
@@ -0,0 +1,462 @@
+#!/usr/bin/env python
+# -*- encoding: utf-8 -*-
+#
+#  +-----------+
+#  |  BRIDGET  |
+#  |        +--------------------------------------------+
+#  +--------| Use a Tor process to test making a Tor     |
+#           | connection to a list of bridges or relays. |
+#           +--------------------------------------------+
+#
+# :authors: Isis Lovecruft, Arturo Filasto
+# :licence: see included LICENSE
+# :version: 0.1.0-alpha
+
+from __future__           import with_statement
+from functools            import partial
+from random               import randint
+
+import os
+import sys
+
+from twisted.python       import usage
+from twisted.internet     import defer, error, reactor
+
+from ooni                 import nettest
+
+from ooni.utils           import log, date
+from ooni.utils.config    import ValueChecker
+
+from ooni.utils.onion     import TxtorconImportError
+from ooni.utils.onion     import PTNoBridgesException, PTNotFoundException
+
+
+try:
+    from ooni.utils.onion     import parse_data_dir
+except:
+    log.msg("Please go to /ooni/lib and do 'make txtorcon' to run this test!")
+
+class MissingAssetException(Exception):
+    pass
+
+class RandomPortException(Exception):
+    """Raised when using a random port conflicts with configured ports."""
+    def __init__(self):
+        log.msg("Unable to use random and specific ports simultaneously")
+        return sys.exit()
+
+class BridgetArgs(usage.Options):
+    """Commandline options."""
+    allowed = "Port to use for Tor's %s, must be between 1024 and 65535."
+    sock_check = ValueChecker(allowed % "SocksPort").port_check
+    ctrl_check = ValueChecker(allowed % "ControlPort").port_check
+
+    optParameters = [
+        ['bridges', 'b', None,
+         'File listing bridge IP:ORPorts to test'],
+        ['relays', 'f', None,
+         'File listing relay IPs to test'],
+        ['socks', 's', 9049, None, sock_check],
+        ['control', 'c', 9052, None, ctrl_check],
+        ['torpath', 'p', None,
+         'Path to the Tor binary to use'],
+        ['datadir', 'd', None,
+         'Tor DataDirectory to use'],
+        ['transport', 't', None,
+         'Tor ClientTransportPlugin'],
+        ['resume', 'r', 0,
+         'Resume at this index']]
+    optFlags = [['random', 'x', 'Use random ControlPort and SocksPort']]
+
+    def postOptions(self):
+        if not self['bridges'] and not self['relays']:
+            raise MissingAssetException(
+                "Bridget can't run without bridges or relays to test!")
+        if self['transport']:
+            ValueChecker.uid_check(
+                "Can't run bridget as root with pluggable transports!")
+            if not self['bridges']:
+                raise PTNoBridgesException
+        if self['socks'] or self['control']:
+            if self['random']:
+                raise RandomPortException
+        if self['datadir']:
+            ValueChecker.dir_check(self['datadir'])
+        if self['torpath']:
+            ValueChecker.file_check(self['torpath'])
+
+class BridgetTest(nettest.NetTestCase):
+    """
+    XXX fill me in
+
+    :ivar config:
+        An :class:`ooni.lib.txtorcon.TorConfig` instance.
+    :ivar relays:
+        A list of all provided relays to test.
+    :ivar bridges:
+        A list of all provided bridges to test.
+    :ivar socks_port:
+        Integer for Tor's SocksPort.
+    :ivar control_port:
+        Integer for Tor's ControlPort.
+    :ivar transport:
+        String defining the Tor's ClientTransportPlugin, for testing
+        a bridge's pluggable transport functionality.
+    :ivar tor_binary:
+        Path to the Tor binary to use, e.g. \'/usr/sbin/tor\'
+    """
+    name    = "bridget"
+    author  = "Isis Lovecruft <isis@xxxxxxxxxxxxxx>"
+    version = "0.1"
+    description   = "Use a Tor process to test connecting to bridges or relays"
+    usageOptions = BridgetArgs
+
+    def setUp(self):
+        """
+        Extra initialization steps. We only want one child Tor process
+        running, so we need to deal with most of the TorConfig() only once,
+        before the experiment runs.
+        """
+        self.socks_port      = 9049
+        self.control_port    = 9052
+        self.circuit_timeout = 90
+        self.tor_binary      = '/usr/sbin/tor'
+        self.data_directory  = None
+
+        def read_from_file(filename):
+            log.msg("Loading information from %s ..." % filename)
+            with open(filename) as fp:
+                lst = []
+                for line in fp.readlines():
+                    if line.startswith('#'):
+                        continue
+                    else:
+                        lst.append(line.replace('\n',''))
+                return lst
+
+        def __count_remaining__(which):
+            total, reach, unreach = map(lambda x: which[x],
+                                        ['all', 'reachable', 'unreachable'])
+            count = len(total) - reach() - unreach()
+            return count
+
+        ## XXX should we do report['bridges_up'].append(self.bridges['current'])
+        self.bridges = {}
+        self.bridges['all'], self.bridges['up'], self.bridges['down'] = \
+            ([] for i in range(3))
+        self.bridges['reachable']   = lambda: len(self.bridges['up'])
+        self.bridges['unreachable'] = lambda: len(self.bridges['down'])
+        self.bridges['remaining']   = lambda: __count_remaining__(self.bridges)
+        self.bridges['current']     = None
+        self.bridges['pt_type']     = None
+        self.bridges['use_pt']      = False
+
+        self.relays = {}
+        self.relays['all'], self.relays['up'], self.relays['down'] = \
+            ([] for i in range(3))
+        self.relays['reachable']   = lambda: len(self.relays['up'])
+        self.relays['unreachable'] = lambda: len(self.relays['down'])
+        self.relays['remaining']   = lambda: __count_remaining__(self.relays)
+        self.relays['current']     = None
+
+        if self.localOptions:
+            try:
+                from txtorcon import TorConfig
+            except ImportError:
+                raise TxtorconImportError
+            else:
+                self.config = TorConfig()
+            finally:
+                options = self.localOptions
+
+            if options['bridges']:
+                self.config.UseBridges = 1
+                self.bridges['all'] = read_from_file(options['bridges'])
+            if options['relays']:
+                ## first hop must be in TorState().guards
+                # XXX where is this defined?
+                self.relays['all'] = read_from_file(options['relays'])
+                self.config.EntryNodes = ','.join(self.relays['all'])
+            if options['socks']:
+                self.socks_port = options['socks']
+            if options['control']:
+                self.control_port = options['control']
+            if options['random']:
+                log.msg("Using randomized ControlPort and SocksPort ...")
+                self.socks_port   = randint(1024, 65535)
+                self.control_port = randint(1024, 65535)
+            if options['torpath']:
+                self.tor_binary = options['torpath']
+            if options['datadir']:
+                self.data_directory = parse_data_dir(options['datadir'])
+            if options['transport']:
+                ## ClientTransportPlugin transport exec pathtobinary [options]
+                ## XXX we need a better way to deal with all PTs
+                log.msg("Using ClientTransportPlugin %s" % options['transport'])
+                self.bridges['use_pt'] = True
+                [self.bridges['pt_type'], pt_exec] = \
+                    options['transport'].split(' ', 1)
+
+                if self.bridges['pt_type'] == "obfs2":
+                    self.config.ClientTransportPlugin = \
+                        self.bridges['pt_type'] + " " + pt_exec
+                else:
+                    raise PTNotFoundException
+
+            self.config.SocksPort            = self.socks_port
+            self.config.ControlPort          = self.control_port
+            self.config.CookieAuthentication = 1
+
+    def test_bridget(self):
+        """
+        if bridges:
+            1. configure first bridge line
+            2a. configure data_dir, if it doesn't exist
+            2b. write torrc to a tempfile in data_dir
+            3. start tor                              } if any of these
+            4. remove bridges which are public relays } fail, add current
+            5. SIGHUP for each bridge                 } bridge to unreach-
+                                                      } able bridges.
+        if relays:
+            1a. configure the data_dir, if it doesn't exist
+            1b. write torrc to a tempfile in data_dir
+            2. start tor
+            3. remove any of our relays which are already part of current
+               circuits
+            4a. attach CustomCircuit() to self.state
+            4b. RELAY_EXTEND for each relay } if this fails, add
+                                            } current relay to list
+                                            } of unreachable relays
+            5.
+        if bridges and relays:
+            1. configure first bridge line
+            2a. configure data_dir if it doesn't exist
+            2b. write torrc to a tempfile in data_dir
+            3. start tor
+            4. remove bridges which are public relays
+            5. remove any of our relays which are already part of current
+               circuits
+            6a. attach CustomCircuit() to self.state
+            6b. for each bridge, build three circuits, with three
+                relays each
+            6c. RELAY_EXTEND for each relay } if this fails, add
+                                            } current relay to list
+                                            } of unreachable relays
+
+        :param args:
+            The :class:`BridgetAsset` line currently being used. Except that it
+            in Bridget it doesn't, so it should be ignored and avoided.
+        """
+        try:
+            from ooni.utils         import process
+            from ooni.utils.onion   import remove_public_relays, start_tor
+            from ooni.utils.onion   import start_tor_filter_nodes
+            from ooni.utils.onion   import setup_fail, setup_done
+            from ooni.utils.onion   import CustomCircuit
+            from ooni.utils.timer   import deferred_timeout, TimeoutError
+            from ooni.lib.txtorcon  import TorConfig, TorState
+        except ImportError:
+            raise TxtorconImportError
+        except TxtorconImportError, tie:
+            log.err(tie)
+            sys.exit()
+
+        def reconfigure_done(state, bridges):
+            """
+            Append :ivar:`bridges['current']` to the list
+            :ivar:`bridges['up'].
+            """
+            log.msg("Reconfiguring with 'Bridge %s' successful"
+                    % bridges['current'])
+            bridges['up'].append(bridges['current'])
+            return state
+
+        def reconfigure_fail(state, bridges):
+            """
+            Append :ivar:`bridges['current']` to the list
+            :ivar:`bridges['down'].
+            """
+            log.msg("Reconfiguring TorConfig with parameters %s failed"
+                    % state)
+            bridges['down'].append(bridges['current'])
+            return state
+
+        @defer.inlineCallbacks
+        def reconfigure_bridge(state, bridges):
+            """
+            Rewrite the Bridge line in our torrc. If use of pluggable
+            transports was specified, rewrite the line as:
+                Bridge <transport_type> <IP>:<ORPort>
+            Otherwise, rewrite in the standard form:
+                Bridge <IP>:<ORPort>
+
+            :param state:
+                A fully bootstrapped instance of
+                :class:`ooni.lib.txtorcon.TorState`.
+            :param bridges:
+                A dictionary of bridges containing the following keys:
+
+                bridges['remaining'] :: A function returning and int for the
+                                        number of remaining bridges to test.
+                bridges['current']   :: A string containing the <IP>:<ORPort>
+                                        of the current bridge.
+                bridges['use_pt']    :: A boolean, True if we're testing
+                                        bridges with a pluggable transport;
+                                        False otherwise.
+                bridges['pt_type']   :: If :ivar:`bridges['use_pt'] is True,
+                                        this is a string containing the type
+                                        of pluggable transport to test.
+            :return:
+                :param:`state`
+            """
+            log.msg("Current Bridge: %s" % bridges['current'])
+            log.msg("We now have %d bridges remaining to test..."
+                    % bridges['remaining']())
+            try:
+                if bridges['use_pt'] is False:
+                    controller_response = yield state.protocol.set_conf(
+                        'Bridge', bridges['current'])
+                elif bridges['use_pt'] and bridges['pt_type'] is not None:
+                    controller_response = yield state.protocol.set_conf(
+                        'Bridge', bridges['pt_type'] +' '+ bridges['current'])
+                else:
+                    raise PTNotFoundException
+
+                if controller_response == 'OK':
+                    finish = yield reconfigure_done(state, bridges)
+                else:
+                    log.err("SETCONF for %s responded with error:\n %s"
+                            % (bridges['current'], controller_response))
+                    finish = yield reconfigure_fail(state, bridges)
+
+                defer.returnValue(finish)
+
+            except Exception, e:
+                log.err("Reconfiguring torrc with Bridge line %s failed:\n%s"
+                        % (bridges['current'], e))
+                defer.returnValue(None)
+
+        def attacher_extend_circuit(attacher, deferred, router):
+            ## XXX todo write me
+            ## state.attacher.extend_circuit
+            raise NotImplementedError
+            #attacher.extend_circuit
+
+        def state_attach(state, path):
+            log.msg("Setting up custom circuit builder...")
+            attacher = CustomCircuit(state)
+            state.set_attacher(attacher, reactor)
+            state.add_circuit_listener(attacher)
+            return state
+
+            ## OLD
+            #for circ in state.circuits.values():
+            #    for relay in circ.path:
+            #        try:
+            #            relay_list.remove(relay)
+            #        except KeyError:
+            #            continue
+            ## XXX how do we attach to circuits with bridges?
+            d = defer.Deferred()
+            attacher.request_circuit_build(d)
+            return d
+
+        def state_attach_fail(state):
+            log.err("Attaching custom circuit builder failed: %s" % state)
+
+        log.msg("Bridget: initiating test ... ")  ## Start the experiment
+
+        ## if we've at least one bridge, and our config has no 'Bridge' line
+        if self.bridges['remaining']() >= 1 \
+                and not 'Bridge' in self.config.config:
+
+            ## configure our first bridge line
+            self.bridges['current'] = self.bridges['all'][0]
+            self.config.Bridge = self.bridges['current']
+                                                  ## avoid starting several
+            self.config.save()                    ## processes
+            assert self.config.config.has_key('Bridge'), "No Bridge Line"
+
+            ## start tor and remove bridges which are public relays
+            from ooni.utils.onion import start_tor_filter_nodes
+            state = start_tor_filter_nodes(reactor, self.config,
+                                           self.control_port, self.tor_binary,
+                                           self.data_directory, self.bridges)
+            #controller = defer.Deferred()
+            #controller.addCallback(singleton_semaphore, tor)
+            #controller.addErrback(setup_fail)
+            #bootstrap = defer.gatherResults([controller, filter_bridges],
+            #                                consumeErrors=True)
+
+            if state is not None:
+                log.debug("state:\n%s" % state)
+                log.debug("Current callbacks on TorState():\n%s"
+                          % state.callbacks)
+
+        ## if we've got more bridges
+        if self.bridges['remaining']() >= 2:
+            #all = []
+            for bridge in self.bridges['all'][1:]:
+                self.bridges['current'] = bridge
+                #new = defer.Deferred()
+                #new.addCallback(reconfigure_bridge, state, self.bridges)
+                #all.append(new)
+            #check_remaining = defer.DeferredList(all, consumeErrors=True)
+            #state.chainDeferred(check_remaining)
+                state.addCallback(reconfigure_bridge, self.bridges)
+
+        if self.relays['remaining']() > 0:
+            while self.relays['remaining']() >= 3:
+                #path = list(self.relays.pop() for i in range(3))
+                #log.msg("Trying path %s" % '->'.join(map(lambda node:
+                #                                         node, path)))
+                self.relays['current'] = self.relays['all'].pop()
+                for circ in state.circuits.values():
+                    for node in circ.path:
+                        if node == self.relays['current']:
+                            self.relays['up'].append(self.relays['current'])
+                    if len(circ.path) < 3:
+                        try:
+                            ext = attacher_extend_circuit(state.attacher, circ,
+                                                          self.relays['current'])
+                            ext.addCallback(attacher_extend_circuit_done,
+                                            state.attacher, circ,
+                                            self.relays['current'])
+                        except Exception, e:
+                            log.err("Extend circuit failed: %s" % e)
+                    else:
+                        continue
+
+        #state.callback(all)
+        #self.reactor.run()
+        return state
+
+    def disabled_startTest(self, args):
+        """
+        Local override of :meth:`OONITest.startTest` to bypass calling
+        self.control.
+
+        :param args:
+            The current line of :class:`Asset`, not used but kept for
+            compatibility reasons.
+        :return:
+            A fired deferred which callbacks :meth:`experiment` and
+            :meth:`OONITest.finished`.
+        """
+        self.start_time = date.now()
+        self.d = self.experiment(args)
+        self.d.addErrback(log.err)
+        self.d.addCallbacks(self.finished, log.err)
+        return self.d
+
+## ISIS' NOTES
+## -----------
+## TODO:
+##       x  cleanup documentation
+##       x  add DataDirectory option
+##       x  check if bridges are public relays
+##       o  take bridge_desc file as input, also be able to give same
+##          format as output
+##       x  Add asynchronous timeout for deferred, so that we don't wait
+##       o  Add asynchronous timeout for deferred, so that we don't wait
+##          forever for bridges that don't work.
diff --git a/data/nettests/experimental/bridge_reachability/echo.py b/data/nettests/experimental/bridge_reachability/echo.py
new file mode 100644
index 0000000..d4033dd
--- /dev/null
+++ b/data/nettests/experimental/bridge_reachability/echo.py
@@ -0,0 +1,132 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+#
+#  +---------+
+#  | echo.py |
+#  +---------+
+#     A simple ICMP-8 ping test.
+#
+# @authors: Isis Lovecruft, <isis@xxxxxxxxxxxxxx>
+# @version: 0.0.2-pre-alpha
+# @license: copyright (c) 2012 Isis Lovecruft
+#           see attached LICENCE file
+#
+
+import os
+import sys
+
+from twisted.python   import usage
+from twisted.internet import reactor, defer
+from ooni             import nettest
+from ooni.utils       import log, net, Storage, txscapy
+
+try:
+    from scapy.all             import IP, ICMP
+    from scapy.all             import sr1
+    from ooni.lib              import txscapy
+    from ooni.lib.txscapy      import txsr, txsend
+    from ooni.templates.scapyt import BaseScapyTest
+except:
+    log.msg("This test requires scapy, see www.secdev.org/projects/scapy")
+
+class UsageOptions(usage.Options):
+    optParameters = [
+        ['dst', 'd', None, 'Host IP to ping'],
+        ['file', 'f', None, 'File of list of IPs to ping'],
+        ['interface', 'i', None, 'Network interface to use'],
+        ['count', 'c', 1, 'Number of packets to send', int],
+        ['size', 's', 56, 'Number of bytes to send in ICMP data field', int],
+        ['ttl', 'l', 25, 'Set the IP Time to Live', int],
+        ['timeout', 't', 2, 'Seconds until timeout if no response', int],
+        ['pcap', 'p', None, 'Save pcap to this file'],
+        ['receive', 'r', True, 'Receive response packets']]
+
+class EchoTest(nettest.NetTestCase):
+    """
+    xxx fill me in
+    """
+    name         = 'echo'
+    author       = 'Isis Lovecruft <isis@xxxxxxxxxxxxxx>'
+    description  = 'A simple ping test to see if a host is reachable.'
+    version      = '0.0.2'
+    requiresRoot = True
+
+    usageOptions    = UsageOptions
+    #requiredOptions = ['dst']
+
+    def setUp(self, *a, **kw):
+        self.destinations = {}
+
+        if self.localOptions:
+            for key, value in self.localOptions.items():
+                log.debug("setting self.%s = %s" % (key, value))
+                setattr(self, key, value)
+
+        self.timeout *= 1000            ## convert to milliseconds
+
+        if not self.interface:
+            try:
+                iface = txscapy.getDefaultIface()
+            except Exception, e:
+                log.msg("No network interface specified!")
+                log.err(e)
+            else:
+                log.msg("Using system default interface: %s" % iface)
+                self.interface = iface
+
+        if self.pcap:
+            try:
+                self.pcapfile = open(self.pcap, 'a+')
+            except:
+                log.msg("Unable to write to pcap file %s" % self.pcap)
+            else:
+                self.pcap = net.capturePacket(self.pcapfile)
+
+        if not self.dst:
+            if self.file:
+                self.dstProcessor(self.file)
+                for key, value in self.destinations.items():
+                    for label, data in value.items():
+                        if not 'ans' in data:
+                            self.dst = label
+        else:
+            self.addDest(self.dst)
+        log.debug("self.dst is now: %s" % self.dst)
+
+        log.debug("Initialization of %s test completed." % self.name)
+
+    def addDest(self, dest):
+        d = dest.strip()
+        self.destinations[d] = {'dst_ip': d}
+
+    def dstProcessor(self, inputfile):
+        from ipaddr import IPAddress
+
+        if os.path.isfile(inputfile):
+            with open(inputfile) as f:
+                for line in f.readlines():
+                    if line.startswith('#'):
+                        continue
+                    self.addDest(line)
+
+    def test_icmp(self):
+        def process_response(echo_reply, dest):
+           ans, unans = echo_reply
+           if ans:
+               log.msg("Received echo reply from %s: %s" % (dest, ans))
+           else:
+               log.msg("No reply was received from %s. Possible censorship event." % dest)
+               log.debug("Unanswered packets: %s" % unans)
+           self.report[dest] = echo_reply
+
+        for label, data in self.destinations.items():
+            reply = sr1(IP(dst=label)/ICMP())
+            process = process_response(reply, label)
+
+        #(ans, unans) = ping
+        #self.destinations[self.dst].update({'ans': ans,
+        #                                    'unans': unans,
+        #                                    'response_packet': ping})
+        #return ping
+
+        #return reply
diff --git a/data/nettests/experimental/chinatrigger.py b/data/nettests/experimental/chinatrigger.py
new file mode 100644
index 0000000..de1f64d
--- /dev/null
+++ b/data/nettests/experimental/chinatrigger.py
@@ -0,0 +1,108 @@
+import random
+import string
+import struct
+import time
+
+from twisted.python import usage
+from ooni.templates.scapyt import BaseScapyTest
+
+class UsageOptions(usage.Options):
+    optParameters = [['dst', 'd', None, 'Specify the target address'],
+                     ['port', 'p', None, 'Specify the target port']
+                    ]
+
+class ChinaTriggerTest(BaseScapyTest):
+    """
+    This test is a OONI based implementation of the C tool written
+    by Philipp Winter to engage chinese probes in active scanning.
+
+    Example of running it:
+    ./bin/ooniprobe chinatrigger -d 127.0.0.1 -p 8080
+    """
+
+    name = "chinatrigger"
+    usageOptions = UsageOptions
+    requiredOptions = ['dst', 'port']
+    timeout = 2
+
+    def setUp(self):
+        self.dst = self.localOptions['dst']
+        self.port = int(self.localOptions['port'])
+
+    @staticmethod
+    def set_random_servername(pkt):
+        ret = pkt[:121]
+        for i in range(16):
+            ret += random.choice(string.ascii_lowercase)
+        ret += pkt[121+16:]
+        return ret
+
+    @staticmethod
+    def set_random_time(pkt):
+        ret = pkt[:11]
+        ret += struct.pack('!I', int(time.time()))
+        ret += pkt[11+4:]
+        return ret
+
+    @staticmethod
+    def set_random_field(pkt):
+        ret = pkt[:15]
+        for i in range(28):
+            ret += chr(random.randint(0, 255))
+        ret += pkt[15+28:]
+        return ret
+
+    @staticmethod
+    def mutate(pkt, idx):
+        """
+        Slightly changed mutate function.
+        """
+        ret = pkt[:idx-1]
+        mutation = chr(random.randint(0, 255))
+        while mutation == pkt[idx]:
+            mutation = chr(random.randint(0, 255))
+        ret += mutation
+        ret += pkt[idx:]
+        return ret
+
+    @staticmethod
+    def set_all_random_fields(pkt):
+        pkt = ChinaTriggerTest.set_random_servername(pkt)
+        pkt = ChinaTriggerTest.set_random_time(pkt)
+        pkt = ChinaTriggerTest.set_random_field(pkt)
+        return pkt
+
+    def test_send_mutations(self):
+        from scapy.all import IP, TCP
+        pkt = "\x16\x03\x01\x00\xcc\x01\x00\x00\xc8"\
+              "\x03\x01\x4f\x12\xe5\x63\x3f\xef\x7d"\
+              "\x20\xb9\x94\xaa\x04\xb0\xc1\xd4\x8c"\
+              "\x50\xcd\xe2\xf9\x2f\xa9\xfb\x78\xca"\
+              "\x02\xa8\x73\xe7\x0e\xa8\xf9\x00\x00"\
+              "\x3a\xc0\x0a\xc0\x14\x00\x39\x00\x38"\
+              "\xc0\x0f\xc0\x05\x00\x35\xc0\x07\xc0"\
+              "\x09\xc0\x11\xc0\x13\x00\x33\x00\x32"\
+              "\xc0\x0c\xc0\x0e\xc0\x02\xc0\x04\x00"\
+              "\x04\x00\x05\x00\x2f\xc0\x08\xc0\x12"\
+              "\x00\x16\x00\x13\xc0\x0d\xc0\x03\xfe"\
+              "\xff\x00\x0a\x00\xff\x01\x00\x00\x65"\
+              "\x00\x00\x00\x1d\x00\x1b\x00\x00\x18"\
+              "\x77\x77\x77\x2e\x67\x6e\x6c\x69\x67"\
+              "\x78\x7a\x70\x79\x76\x6f\x35\x66\x76"\
+              "\x6b\x64\x2e\x63\x6f\x6d\x00\x0b\x00"\
+              "\x04\x03\x00\x01\x02\x00\x0a\x00\x34"\
+              "\x00\x32\x00\x01\x00\x02\x00\x03\x00"\
+              "\x04\x00\x05\x00\x06\x00\x07\x00\x08"\
+              "\x00\x09\x00\x0a\x00\x0b\x00\x0c\x00"\
+              "\x0d\x00\x0e\x00\x0f\x00\x10\x00\x11"\
+              "\x00\x12\x00\x13\x00\x14\x00\x15\x00"\
+              "\x16\x00\x17\x00\x18\x00\x19\x00\x23"\
+              "\x00\x00"
+
+        pkt = ChinaTriggerTest.set_all_random_fields(pkt)
+        pkts = [IP(dst=self.dst)/TCP(dport=self.port)/pkt]
+        for x in range(len(pkt)):
+            mutation = IP(dst=self.dst)/TCP(dport=self.port)/ChinaTriggerTest.mutate(pkt, x)
+            pkts.append(mutation)
+        return self.sr(pkts, timeout=2)
+
diff --git a/data/nettests/experimental/dns_injection.py b/data/nettests/experimental/dns_injection.py
new file mode 100644
index 0000000..97233cf
--- /dev/null
+++ b/data/nettests/experimental/dns_injection.py
@@ -0,0 +1,63 @@
+# -*- encoding: utf-8 -*-
+from twisted.python import usage
+from twisted.internet import defer
+
+from ooni.templates import dnst
+from ooni import nettest
+from ooni.utils import log
+
+class UsageOptions(usage.Options):
+    optParameters = [
+            ['resolver', 'r', '8.8.8.1', 'an invalid DNS resolver'],
+            ['timeout', 't', 3, 'timeout after which we should consider the query failed']
+    ]
+
+class DNSInjectionTest(dnst.DNSTest):
+    """
+    This test detects DNS spoofed DNS responses by performing UDP based DNS
+    queries towards an invalid DNS resolver.
+
+    For it to work we must be traversing the network segment of a machine that
+    is actively injecting DNS query answers.
+    """
+    name = "DNS Injection"
+    description = "Checks for injection of spoofed DNS answers"
+    version = "0.1"
+    authors = "Arturo Filastò"
+
+    inputFile = ['file', 'f', None,
+                 'Input file of list of hostnames to attempt to resolve']
+
+    usageOptions = UsageOptions
+    requiredOptions = ['resolver', 'file']
+
+    def setUp(self):
+        self.resolver = (self.localOptions['resolver'], 53)
+        self.queryTimeout = [self.localOptions['timeout']]
+
+    def inputProcessor(self, filename):
+        fp = open(filename)
+        for line in fp:
+            if line.startswith('http://'):
+                yield line.replace('http://', '').replace('/', '').strip()
+            else:
+                yield line.strip()
+        fp.close()
+
+    def test_injection(self):
+        self.report['injected'] = None
+
+        d = self.performALookup(self.input, self.resolver)
+        @d.addCallback
+        def cb(res):
+            log.msg("The DNS query for %s is injected" % self.input)
+            self.report['injected'] = True
+
+        @d.addErrback
+        def err(err):
+            err.trap(defer.TimeoutError)
+            log.msg("The DNS query for %s is not injected" % self.input)
+            self.report['injected'] = False
+
+        return d
+
diff --git a/data/nettests/experimental/domclass_collector.py b/data/nettests/experimental/domclass_collector.py
new file mode 100644
index 0000000..c1866f2
--- /dev/null
+++ b/data/nettests/experimental/domclass_collector.py
@@ -0,0 +1,33 @@
+# -*- encoding: utf-8 -*-
+#
+# The purpose of this collector is to compute the eigenvector for the input
+# file containing a list of sites.
+#
+#
+# :authors: Arturo Filastò
+# :licence: see LICENSE
+
+from twisted.internet import threads, defer
+
+from ooni.kit import domclass
+from ooni.templates import httpt
+
+class DOMClassCollector(httpt.HTTPTest):
+    name = "DOM class collector"
+    author = "Arturo Filastò"
+    version = 0.1
+
+    followRedirects = True
+
+    inputFile = ['file', 'f', None, 'The list of urls to build a domclass for']
+
+    def test_collect(self):
+        if self.input:
+            url = self.input
+            return self.doRequest(url)
+        else:
+            raise Exception("No input specified")
+
+    def processResponseBody(self, body):
+        eigenvalues = domclass.compute_eigenvalues_from_DOM(content=body)
+        self.report['eigenvalues'] = eigenvalues.tolist()
diff --git a/data/nettests/experimental/http_filtering_bypassing.py b/data/nettests/experimental/http_filtering_bypassing.py
new file mode 100644
index 0000000..dc103db
--- /dev/null
+++ b/data/nettests/experimental/http_filtering_bypassing.py
@@ -0,0 +1,84 @@
+# -*- encoding: utf-8 -*-
+from twisted.python import usage
+
+from ooni.utils import log
+from ooni.utils import randomStr, randomSTR
+from ooni.templates import tcpt
+
class UsageOptions(usage.Options):
    # Command-line options: -b/--backend is the address of the TCP echo
    # server, -p/--backendport its port (debugging only).
    optParameters = [['backend', 'b', '127.0.0.1',
                        'The OONI backend that runs a TCP echo server'],
                    ['backendport', 'p', 80, 'Specify the port that the TCP echo server is running (should only be set for debugging)']]
+
class HTTPFilteringBypass(tcpt.TCPTest):
    """
    Sends raw HTTP-like payloads for a target hostname to a TCP echo
    backend and reports tampering whenever the echoed bytes differ from
    what was sent, which indicates an inline device is rewriting or
    blocking requests for that hostname.

    NOTE(review): payload lines end with LF CR rather than the standard
    HTTP CR LF; preserved as-is since the comparison is against the echoed
    payload either way.
    """
    name = "HTTPFilteringBypass"
    version = "0.1"
    authors = "xx"

    inputFile = ['file', 'f', None,
            'Specify a list of hostnames to use as inputs']

    usageOptions = UsageOptions
    requiredOptions = ['backend']

    def setUp(self):
        # Echo-server endpoint taken from the command-line options.
        self.port = int(self.localOptions['backendport'])
        self.address = self.localOptions['backend']

    def check_for_manipulation(self, response, payload):
        """Report tampering when the echo differs from the sent payload."""
        log.debug("Checking if %s == %s" % (response, payload))
        if response != payload:
            self.report['tampering'] = True
        else:
            self.report['tampering'] = False

    def test_prepend_newline(self):
        # A leading newline sometimes slips past naive HTTP parsers.
        payload = "\nGET / HTTP/1.1\n\r"
        payload += "Host: %s\n\r" % self.input

        d = self.sendPayload(payload)
        d.addCallback(self.check_for_manipulation, payload)
        return d

    def test_tab_trick(self):
        # Trailing tab appended to the hostname.
        payload = "GET / HTTP/1.1\n\r"
        payload += "Host: %s\t\n\r" % self.input

        d = self.sendPayload(payload)
        d.addCallback(self.check_for_manipulation, payload)
        return d

    def test_subdomain_blocking(self):
        # Bug fix: '%' binds tighter than '+', so the original expression
        # formatted only the random prefix into the Host header and tacked
        # '.' + self.input on after the line terminator.  Build the whole
        # hostname first so it all lands inside the header value.
        hostname_field = randomStr(10) + '.' + self.input
        payload = "GET / HTTP/1.1\n\r"
        payload += "Host: %s\n\r" % hostname_field

        d = self.sendPayload(payload)
        d.addCallback(self.check_for_manipulation, payload)
        return d

    def test_fuzzy_domain_blocking(self):
        # Random labels on both sides of the target hostname.
        hostname_field = randomStr(10) + '.' + self.input + '.' + randomStr(10)
        payload = "GET / HTTP/1.1\n\r"
        payload += "Host: %s\n\r" % hostname_field

        d = self.sendPayload(payload)
        d.addCallback(self.check_for_manipulation, payload)
        return d

    def test_fuzzy_match_blocking(self):
        # Target hostname embedded inside a longer random string.
        hostname_field = randomStr(10) + self.input + randomStr(10)
        payload = "GET / HTTP/1.1\n\r"
        payload += "Host: %s\n\r" % hostname_field

        d = self.sendPayload(payload)
        d.addCallback(self.check_for_manipulation, payload)
        return d

    def test_normal_request(self):
        # Control case: plain request with the unmodified hostname.
        payload = "GET / HTTP/1.1\n\r"
        payload += "Host: %s\n\r" % self.input

        d = self.sendPayload(payload)
        d.addCallback(self.check_for_manipulation, payload)
        return d
+
diff --git a/data/nettests/experimental/http_keyword_filtering.py b/data/nettests/experimental/http_keyword_filtering.py
new file mode 100644
index 0000000..0ae9c52
--- /dev/null
+++ b/data/nettests/experimental/http_keyword_filtering.py
@@ -0,0 +1,45 @@
+# -*- encoding: utf-8 -*-
+#
+# :authors: Arturo Filastò
+# :licence: see LICENSE
+
+from twisted.python import usage
+
+from ooni.templates import httpt
+
class UsageOptions(usage.Options):
    # -b/--backend: URL of the HTTP test backend the keywords are sent to.
    optParameters = [['backend', 'b', 'http://127.0.0.1:57001',
                        'URL of the test backend to use']]
+
class HTTPKeywordFiltering(httpt.HTTPTest):
    """
    Sends each input keyword to the test backend in the body of both a GET
    and a POST request.

    Censorship is not detected on the client side; the test simply records
    the responses coming back from the HTTP backend server.
    """
    name = "HTTP Keyword Filtering"
    author = "Arturo Filastò"
    version = "0.1.1"

    inputFile = ['file', 'f', None, 'List of keywords to use for censorship testing']

    usageOptions = UsageOptions

    requiredOptions = ['backend']

    def _request_with_keyword(self, method):
        # Both public test methods differ only in the HTTP method used.
        return self.doRequest(self.localOptions['backend'], method=method,
                              body=self.input)

    def test_get(self):
        """
        Perform a HTTP GET request to the backend containing the keyword to be
        tested inside of the request body.
        """
        return self._request_with_keyword("GET")

    def test_post(self):
        """
        Perform a HTTP POST request to the backend containing the keyword to be
        tested inside of the request body.
        """
        return self._request_with_keyword("POST")
+
diff --git a/data/nettests/experimental/http_trix.py b/data/nettests/experimental/http_trix.py
new file mode 100644
index 0000000..85a4ba2
--- /dev/null
+++ b/data/nettests/experimental/http_trix.py
@@ -0,0 +1,47 @@
+# -*- encoding: utf-8 -*-
+from twisted.python import usage
+
+from ooni.utils import log
+from ooni.utils import randomStr, randomSTR
+from ooni.templates import tcpt
+
class UsageOptions(usage.Options):
    # Command-line options: -b/--backend is the address of the TCP echo
    # server, -p/--backendport its port (debugging only).
    optParameters = [['backend', 'b', '127.0.0.1',
                        'The OONI backend that runs a TCP echo server'],
                    ['backendport', 'p', 80, 'Specify the port that the TCP echo server is running (should only be set for debugging)']]
+
class HTTPTrix(tcpt.TCPTest):
    """
    Sends squid-specific HTTP trickery to a TCP echo backend and reports
    tampering whenever the echoed bytes differ from what was sent.
    """
    name = "HTTPTrix"
    version = "0.1"
    authors = "Arturo Filastò"

    usageOptions = UsageOptions
    requiredOptions = ['backend']

    def setUp(self):
        # Echo-server endpoint taken from the command-line options.
        self.address = self.localOptions['backend']
        self.port = int(self.localOptions['backendport'])

    def check_for_manipulation(self, response, payload):
        """Flag tampering when the response is not a byte-for-byte echo."""
        log.debug("Checking if %s == %s" % (response, payload))
        self.report['tampering'] = response != payload

    def test_for_squid_cache_object(self):
        """
        This detects the presence of a squid transparent HTTP proxy by sending
        a request for cache_object://localhost/info.

        This tests for the presence of a Squid Transparent proxy by sending:

            GET cache_object://localhost/info HTTP/1.1
        """
        payload = 'GET cache_object://localhost/info HTTP/1.1' + '\n\r'

        d = self.sendPayload(payload)
        d.addCallback(self.check_for_manipulation, payload)
        return d
+
diff --git a/data/nettests/experimental/http_uk_mobile_networks.py b/data/nettests/experimental/http_uk_mobile_networks.py
new file mode 100644
index 0000000..784a9e9
--- /dev/null
+++ b/data/nettests/experimental/http_uk_mobile_networks.py
@@ -0,0 +1,85 @@
+# -*- encoding: utf-8 -*-
+import yaml
+
+from twisted.python import usage
+from twisted.plugin import IPlugin
+
+from ooni.templates import httpt
+from ooni.utils import log
+
class UsageOptions(usage.Options):
    """
    See https://github.com/hellais/ooni-inputs/processed/uk_mobile_networks_redirects.yaml 
    for an example of what the rules file should look like.
    """
    # -y/--rules: YAML file mapping rule names to redirect patterns.
    optParameters = [
                     ['rules', 'y', None, 
                    'Specify the redirect rules file ']
                    ]
+
class HTTPUKMobileNetworksTest(httpt.HTTPTest):
    """
    This test was thought of by Open Rights Group and implemented with the
    purpose of detecting censorship in the UK.
    For more details on this test see:
    https://trac.torproject.org/projects/tor/ticket/6437
    XXX port the knowledge from the trac ticket into this test docstring
    """
    name = "HTTP UK mobile network redirect test"

    usageOptions = UsageOptions

    followRedirects = True

    inputFile = ['urls', 'f', None, 'List of urls one per line to test for censorship']
    requiredOptions = ['urls']

    def testPattern(self, value, pattern, type):
        """
        Match value against a single pattern.

        type 'eq' compares for equality; 're' treats pattern as a regular
        expression matched at the start of value; any other type returns
        None (unknown rule type).
        """
        if type == 'eq':
            return value == pattern
        elif type == 're':
            import re
            return bool(re.match(pattern, value))
        return None

    def testPatterns(self, patterns, location):
        """
        Return True when any pattern in the list matches location.

        A non-list `patterns` matches nothing.
        """
        test_result = False

        if type(patterns) == list:
            for pattern in patterns:
                test_result |= self.testPattern(location, pattern['value'], pattern['type'])
        # Bug fix: removed a stray, unused read of self.localOptions['rules']
        # left over from a copy/paste — it served no purpose here.
        return test_result

    def testRules(self, rules, location):
        """
        Evaluate every redirect rule against location.

        Returns a dict with one entry per rule ({'name', 'patterns',
        'test'}) plus an aggregate 'blocked' flag.
        """
        result = {}
        blocked = False
        for rule, value in rules.items():
            current_rule = {}
            current_rule['name'] = value['name']
            current_rule['patterns'] = value['patterns']
            current_rule['test'] = self.testPatterns(value['patterns'], location)
            blocked |= current_rule['test']
            result[rule] = current_rule
        result['blocked'] = blocked
        return result

    def processRedirect(self, location):
        """
        Called for every redirect; matches the Location header against the
        rules file and stores the outcome in the report.
        """
        self.report['redirect'] = None
        rules_file = self.localOptions['rules']

        # `with` guarantees the file is closed even if YAML parsing raises.
        with open(rules_file) as fp:
            rules = yaml.safe_load(fp)

        log.msg("Testing rules %s" % rules)
        redirect = self.testRules(rules, location)
        self.report['redirect'] = redirect
+
+
+
diff --git a/data/nettests/experimental/keyword_filtering.py b/data/nettests/experimental/keyword_filtering.py
new file mode 100644
index 0000000..9eec4ff
--- /dev/null
+++ b/data/nettests/experimental/keyword_filtering.py
@@ -0,0 +1,52 @@
+# -*- encoding: utf-8 -*-
+#
+# :authors: Arturo Filastò
+# :licence: see LICENSE
+
+from twisted.python import usage
+from twisted.internet import defer
+
+from ooni.utils import log
+from ooni.templates import scapyt
+
+from scapy.all import *
+
class UsageOptions(usage.Options):
    # -b/--backend: "host:port" of the TCP echo backend.
    # -t/--timeout: seconds to wait for RST replies before giving up.
    optParameters = [
                    ['backend', 'b', '127.0.0.1:57002', 'Test backend running TCP echo'],
                    ['timeout', 't', 5, 'Timeout after which to give up waiting for RST packets']
                    ]
+
class KeywordFiltering(scapyt.BaseScapyTest):
    """
    Detects keyword-based filtering by placing each keyword in the payload
    of a TCP packet sent towards an echo backend and recording any RST
    packets that come back.
    """
    name = "Keyword Filtering detection based on RST packets"
    author = "Arturo Filastò"
    version = "0.1"

    usageOptions = UsageOptions

    inputFile = ['file', 'f', None, 
            'List of keywords to use for censorship testing']

    def test_tcp_keyword_filtering(self):
        """
        Places the keyword to be tested in the payload of a TCP packet.
        XXX need to implement bisection method for enumerating keywords.
            though this should not be an issue since we are testing all 
            the keywords in parallel.
        """
        # Bug fix: the backend option is a single "host:port" string (see
        # the UsageOptions default), so it must be split before use, and
        # the timeout was previously read from an undefined local name.
        backend_ip, backend_port = self.localOptions['backend'].split(':')
        backend_port = int(backend_port)
        timeout = int(self.localOptions['timeout'])

        def finished(packets):
            # Bug fix: the original log line referenced an undefined `port`
            # (copy/paste from the traceroute test).
            log.debug("Finished sending keyword towards port %s" % backend_port)
            answered, unanswered = packets
            self.report['rst_packets'] = []
            for snd, rcv in answered:
                # TCP flags value 4 == RST: the keyword triggered a reset.
                if rcv[TCP].flags == 4:
                    self.report['rst_packets'].append(rcv)

        keyword_to_test = str(self.input)
        packets = IP(dst=backend_ip,id=RandShort())/TCP(dport=backend_port)/keyword_to_test
        d = self.sr(packets, timeout=timeout)
        d.addCallback(finished)
        return d
+
diff --git a/data/nettests/experimental/parasitictraceroute.py b/data/nettests/experimental/parasitictraceroute.py
new file mode 100644
index 0000000..631c24b
--- /dev/null
+++ b/data/nettests/experimental/parasitictraceroute.py
@@ -0,0 +1,129 @@
+# -*- encoding: utf-8 -*-
+#
+# :authors: Arturo Filastò
+# :licence: see LICENSE
+
+from twisted.python import usage
+from twisted.internet import defer
+
+from ooni.templates import scapyt
+
+from scapy.all import *
+
+from ooni.utils import log
+
class UsageOptions(usage.Options):
    # Traceroute parameters: target host, overall timeout, maximum TTL to
    # probe, destination port, and an optional fixed source port.
    optParameters = [['backend', 'b', 'google.com', 'Test backend to use'],
                    ['timeout', 't', 5, 'The timeout for the traceroute test'],
                    ['maxttl', 'm', 64, 'The maximum value of ttl to set on packets'],
                    ['dstport', 'd', 80, 'Set the destination port of the traceroute test'],
                    ['srcport', 'p', None, 'Set the source port to a specific value']]
+
class ParasiticalTracerouteTest(scapyt.BaseScapyTest):
    """
    Performs a TCP traceroute by piggy-backing probe packets on an
    established TCP session.  See the test method docstring for caveats.
    """
    name = "Parasitic TCP Traceroute Test"
    author = "Arturo Filastò"
    version = "0.1"

    usageOptions = UsageOptions

    def setUp(self):
        def get_sport():
            # Use the user-supplied source port if given, otherwise pick a
            # random unprivileged one.
            if self.localOptions['srcport']:
                return int(self.localOptions['srcport'])
            else:
                return random.randint(1024, 65535)
        self.get_sport = get_sport

        # Bug fix: the backend option is a hostname, so a forward lookup
        # (gethostbyname) is the right call; gethostbyaddr performs a
        # reverse lookup and fails for names without a PTR record.
        self.dst_ip = socket.gethostbyname(self.localOptions['backend'])

        self.dport = int(self.localOptions['dstport'])
        self.max_ttl = int(self.localOptions['maxttl'])

    @defer.inlineCallbacks
    def test_parasitic_tcp_traceroute(self):
        """
        Establishes a TCP stream, then sequentially sends TCP packets with
        increasing TTL until we reach the ttl of the destination.

        Requires the backend to respond with an ACK to our SYN packet (i.e.
        the port must be open)

        XXX this currently does not work properly. The problem lies in the fact
        that we are currently using the scapy layer 3 socket. This socket makes
        packets received be trapped by the kernel TCP stack, therefore when we
        send out a SYN and get back a SYN-ACK the kernel stack will reply with
        a RST because it did not send a SYN.

        The quick fix to this would be to establish a TCP stream using socket
        calls and then "cannibalizing" the TCP session with scapy.

        The real fix is to make scapy use libpcap instead of raw sockets
        obviously as we previously did... arg.
        """
        sport = self.get_sport()
        dport = self.dport
        ipid = int(RandShort())

        ip_layer = IP(dst=self.dst_ip,
                id=ipid, ttl=self.max_ttl)

        syn = ip_layer/TCP(sport=sport, dport=dport, flags="S", seq=0)

        log.msg("Sending...")
        syn.show2()

        synack = yield self.sr1(syn)

        # Bug fix: check for a missing reply *before* dereferencing it; the
        # original called synack.show2() first, which raised AttributeError
        # whenever the SYN timed out.
        if not synack:
            log.err("Got no response. Try increasing max_ttl")
            return

        log.msg("Got response...")
        synack.show2()

        if synack[TCP].flags == 11:
            # FIN+ACK (0x0b): port closed.
            log.msg("Got back a FIN ACK. The destination port is closed")
            return

        elif synack[TCP].flags == 18:
            # SYN+ACK (0x12): handshake is proceeding.
            log.msg("Got a SYN ACK. All is well.")
        else:
            log.err("Got an unexpected result")
            return

        ack = ip_layer/TCP(sport=synack.dport,
                            dport=dport, flags="A",
                            seq=synack.ack, ack=synack.seq + 1)

        yield self.send(ack)

        self.report['hops'] = []
        # For the time being we make the assumption that we are NATted and
        # that the NAT will forward the packet to the destination even if the TTL has 
        for ttl in range(1, self.max_ttl):
            log.msg("Sending packet with ttl of %s" % ttl)
            ip_layer.ttl = ttl
            empty_tcp_packet = ip_layer/TCP(sport=synack.dport,
                    dport=dport, flags="A",
                    seq=synack.ack, ack=synack.seq + 1)

            answer = yield self.sr1(empty_tcp_packet)
            if not answer:
                log.err("Got no response for ttl %s" % ttl)
                continue

            try:
                # ICMP time-exceeded from an intermediate hop.
                icmp = answer[ICMP]
                report = {'ttl': empty_tcp_packet.ttl,
                    'address': answer.src,
                    'rtt': answer.time - empty_tcp_packet.time
                }
                log.msg("%s: %s" % (dport, report))
                self.report['hops'].append(report)

            except IndexError:
                # No ICMP layer: if the reply came from the target itself we
                # have reached the destination.
                if answer.src == self.dst_ip:
                    answer.show()
                    log.msg("Reached the destination. We have finished the traceroute")
                    return
+
diff --git a/data/nettests/experimental/squid.py b/data/nettests/experimental/squid.py
new file mode 100644
index 0000000..777bc3e
--- /dev/null
+++ b/data/nettests/experimental/squid.py
@@ -0,0 +1,117 @@
+# -*- encoding: utf-8 -*-
+#
+# Squid transparent HTTP proxy detector
+# *************************************
+#
+# :authors: Arturo Filastò
+# :licence: see LICENSE
+
+from ooni import utils
+from ooni.utils import log
+from ooni.templates import httpt
+
class SquidTest(httpt.HTTPTest):
    """
    This test aims at detecting the presence of a squid based transparent HTTP
    proxy. It also tries to detect the version number.
    """
    name = "Squid test"
    author = "Arturo Filastò"
    version = "0.1"

    optParameters = [['backend', 'b', 'http://ooni.nu/test/', 'Test backend to use']]

    #inputFile = ['urls', 'f', None, 'Urls file']
    inputs =['http://google.com']
    def test_cacheobject(self):
        """
        This detects the presence of a squid transparent HTTP proxy by sending
        a request for cache_object://localhost/info.

        The response to this request will usually also contain the squid
        version number.
        """
        log.debug("Running")
        def process_body(body):
            if "Access Denied." in body:
                self.report['transparent_http_proxy'] = True
            else:
                self.report['transparent_http_proxy'] = False

        log.msg("Testing Squid proxy presence by sending a request for "\
                "cache_object")
        headers = {}
        #headers["Host"] = [self.input]
        # Bug fix: the key was initialized as 'trans_http_proxy' while
        # process_body wrote 'transparent_http_proxy'; use one key throughout
        # so the None default is actually overwritten.
        self.report['transparent_http_proxy'] = None
        method = "GET"
        body = "cache_object://localhost/info"
        return self.doRequest(self.localOptions['backend'], method=method, body=body,
                        headers=headers, body_processor=process_body)

    def test_search_bad_request(self):
        """
        Attempts to perform a request with a random invalid HTTP method.

        If we are being MITMed by a Transparent Squid HTTP proxy we will get
        back a response containing the X-Squid-Error header.
        """
        def process_headers(headers):
            log.debug("Processing headers in test_search_bad_request")
            if 'X-Squid-Error' in headers:
                log.msg("Detected the presence of a transparent HTTP "\
                        "squid proxy")
                # Bug fix: this branch wrote 'trans_http_proxy' while the
                # other branch and the initializer used
                # 'transparent_http_proxy'; report under a single key.
                self.report['transparent_http_proxy'] = True
            else:
                log.msg("Did not detect the presence of transparent HTTP "\
                        "squid proxy")
                self.report['transparent_http_proxy'] = False

        log.msg("Testing Squid proxy presence by sending a random bad request")
        headers = {}
        #headers["Host"] = [self.input]
        method = utils.randomSTR(10, True)
        self.report['transparent_http_proxy'] = None
        return self.doRequest(self.localOptions['backend'], method=method,
                        headers=headers, headers_processor=process_headers)

    def test_squid_headers(self):
        """
        Detects the presence of a squid transparent HTTP proxy based on the
        response headers it adds to the responses to requests.
        """
        def process_headers(headers):
            """
            Checks if any of the headers that squid is known to add match the
            squid regexp.

            We are looking for something that looks like this:

                via: 1.0 cache_server:3128 (squid/2.6.STABLE21)
                x-cache: MISS from cache_server
                x-cache-lookup: MISS from cache_server:3128
            """
            # Bug fix: `re` was used here but never imported anywhere in
            # this module, raising NameError at runtime.
            import re
            squid_headers = {'via': r'.* \((squid.*)\)',
                        'x-cache': r'MISS from (\w+)',
                        'x-cache-lookup': r'MISS from (\w+:?\d+?)'
                        }

            self.report['transparent_http_proxy'] = False
            for key in squid_headers.keys():
                if key in headers:
                    log.debug("Found %s in headers" % key)
                    m = re.search(squid_headers[key], headers[key])
                    if m:
                        log.msg("Detected the presence of squid transparent"\
                                " HTTP Proxy")
                        self.report['transparent_http_proxy'] = True

        log.msg("Testing Squid proxy by looking at response headers")
        headers = {}
        #headers["Host"] = [self.input]
        method = "GET"
        self.report['transparent_http_proxy'] = None
        d = self.doRequest(self.localOptions['backend'], method=method,
                        headers=headers, headers_processor=process_headers)
        return d
+
+
diff --git a/data/nettests/manipulation/__init__.py b/data/nettests/manipulation/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/data/nettests/manipulation/captiveportal.py b/data/nettests/manipulation/captiveportal.py
new file mode 100644
index 0000000..a0f8c6b
--- /dev/null
+++ b/data/nettests/manipulation/captiveportal.py
@@ -0,0 +1,650 @@
+# -*- coding: utf-8 -*-
+# captiveportal
+# *************
+#
+# This test is a collection of tests to detect the presence of a
+# captive portal. Code is taken, in part, from the old ooni-probe,
+# which was written by Jacob Appelbaum and Arturo Filastò.
+#
+# This module performs multiple tests that match specific vendor captive
+# portal tests. This is a basic internet captive portal filter tester written
+# for RECon 2011.
+#
+# Read the following URLs to understand the captive portal detection process
+# for various vendors:
+#
+# http://technet.microsoft.com/en-us/library/cc766017%28WS.10%29.aspx
+# http://blog.superuser.com/2011/05/16/windows-7-network-awareness/
+# http://isc.sans.org/diary.html?storyid=10312&;
+# http://src.chromium.org/viewvc/chrome?view=rev&revision=74608
+# http://code.google.com/p/chromium-os/issues/detail?3281ttp,
+# http://crbug.com/52489
+# http://crbug.com/71736
+# https://bugzilla.mozilla.org/show_bug.cgi?id=562917
+# https://bugzilla.mozilla.org/show_bug.cgi?id=603505
+# http://lists.w3.org/Archives/Public/ietf-http-wg/2011JanMar/0086.html
+# http://tools.ietf.org/html/draft-nottingham-http-portal-02
+#
+# :authors: Jacob Appelbaum, Arturo Filastò, Isis Lovecruft
+# :license: see LICENSE for more details
+
+import base64
+import os
+import random
+import re
+import string
+import urllib2
+from urlparse import urlparse
+
+from twisted.python import usage
+from twisted.internet import defer, threads
+
+from ooni import nettest
+from ooni.templates import httpt
+from ooni.utils import net
+from ooni.utils import log
+
+try:
+    from dns import resolver
+except ImportError:
+    print "The dnspython module was not found:"
+    print "See https://crate.io/packages/dnspython/";
+    resolver = None
+
+__plugoo__ = "captiveportal"
+__desc__ = "Captive portal detection test"
+
class UsageOptions(usage.Options):
    # -a/--asset: optional asset file; -e/--experiment-url: URL fetched as
    # the experiment; -u/--user-agent: defaults to a random real-world UA.
    optParameters = [['asset', 'a', None, 'Asset file'],
                 ['experiment-url', 'e', 'http://google.com/', 'Experiment URL'],
                 ['user-agent', 'u', random.choice(net.userAgents),
                  'User agent for HTTP requests']
                ]
+
+class CaptivePortal(nettest.NetTestCase):
+    """
+    Compares content and status codes of HTTP responses, and attempts
+    to determine if content has been altered.
+    """
+
+    name = "captivep"
+    description = "Captive Portal Test"
+    version = '0.2'
+    author = "Isis Lovecruft"
+    usageOptions = UsageOptions
+
    def http_fetch(self, url, headers={}):
        """
        Parses an HTTP url, fetches it, and returns a urllib2 response
        object.

        Returns a (response, headers_dict) pair on success and (None, None)
        when the fetch fails with an HTTPError.
        """
        url = urlparse(url).geturl()
        # NOTE(review): `headers` has a mutable default, but it is never
        # mutated here so the shared-default pitfall does not bite.
        request = urllib2.Request(url, None, headers)
        #XXX: HTTP Error 302: The HTTP server returned a redirect error that
        #would lead to an infinite loop.  The last 30x error message was: Found
        try:
            response = urllib2.urlopen(request)
            response_headers = dict(response.headers)
            return response, response_headers
        except urllib2.HTTPError, e:
            log.err("HTTPError: %s" % e)
            return None, None
+
    def http_content_match_fuzzy_opt(self, experimental_url, control_result,
                                     headers=None, fuzzy=False):
        """
        Makes an HTTP request on port 80 for experimental_url, then
        compares the response_content of experimental_url with the
        control_result. Optionally, if the fuzzy parameter is set to
        True, the response_content is compared with a regex of the
        control_result. If the response_content from the
        experimental_url and the control_result match, returns True
        with the HTTP status code and headers; False, status code, and
        headers if otherwise.

        Returns (False, False, False) when the HTTP fetch itself fails.
        """

        if headers is None:
            # NOTE(review): other nettests in this tree use
            # `self.localOptions`; confirm `local_options` is the attribute
            # NetTestCase actually provides here.
            default_ua = self.local_options['user-agent']
            headers = {'User-Agent': default_ua}

        response, response_headers = self.http_fetch(experimental_url, headers)

        response_content = response.read() if response else None
        response_code = response.code if response else None
        if response_content is None:
            log.err("HTTP connection appears to have failed.")
            return False, False, False

        if fuzzy:
            # Treat the control result as a regex and search for it anywhere
            # in the experiment body.
            pattern = re.compile(control_result)
            match = pattern.search(response_content)
            log.msg("Fuzzy HTTP content comparison for experiment URL")
            log.msg("'%s'" % experimental_url)
            if not match:
                log.msg("does not match!")
                return False, response_code, response_headers
            else:
                log.msg("and the expected control result yielded a match.")
                return True, response_code, response_headers
        else:
            # Strict comparison: bodies must be byte-for-byte identical.
            if str(response_content) != str(control_result):
                log.msg("HTTP content comparison of experiment URL")
                log.msg("'%s'" % experimental_url)
                log.msg("and the expected control result do not match.")
                return False, response_code, response_headers
            else:
                return True, response_code, response_headers
+
+    def http_status_code_match(self, experiment_code, control_code):
+        """
+        Compare two HTTP status codes, returns True if they match.
+        """
+        return int(experiment_code) == int(control_code)
+
+    def http_status_code_no_match(self, experiment_code, control_code):
+        """
+        Compare two HTTP status codes, returns True if they do not match.
+        """
+        return int(experiment_code) != int(control_code)
+
    def dns_resolve(self, hostname, nameserver=None):
        """
        Resolves hostname(s) though nameserver to corresponding
        address(es). hostname may be either a single hostname string,
        or a list of strings. If nameserver is not given, use local
        DNS resolver, and if that fails try using 8.8.8.8.

        Returns a list of address strings (with 'NXDOMAIN' markers for
        names that did not resolve), or [] when dnspython is unavailable.
        """
        if not resolver:
            log.msg("dnspython is not installed.\
                    Cannot perform DNS Resolve test")
            return []
        # Normalize a single hostname into a one-element list.
        # NOTE(review): under Python 2 this misses `unicode` hostnames.
        if isinstance(hostname, str):
            hostname = [hostname]

        if nameserver is not None:
            res = resolver.Resolver(configure=False)
            res.nameservers = [nameserver]
        else:
            res = resolver.Resolver()

        response = []
        answer = None

        for hn in hostname:
            try:
                answer = res.query(hn)
            except resolver.NoNameservers:
                # Local resolver unusable: retry once via Google DNS.
                res.nameservers = ['8.8.8.8']
                try:
                    answer = res.query(hn)
                except resolver.NXDOMAIN:
                    log.msg("DNS resolution for %s returned NXDOMAIN" % hn)
                    response.append('NXDOMAIN')
            except resolver.NXDOMAIN:
                log.msg("DNS resolution for %s returned NXDOMAIN" % hn)
                response.append('NXDOMAIN')
            finally:
                # NOTE(review): returning from inside `finally` both stops at
                # the first unanswered name AND swallows any exception not
                # caught above (e.g. resolver.Timeout) — verify intended.
                if not answer:
                    return response
                for addr in answer:
                    response.append(addr.address)
        return response
+
+    def dns_resolve_match(self, experiment_hostname, control_address):
+        """
+        Resolve experiment_hostname, and check to see that it returns
+        an experiment_address which matches the control_address.  If
+        they match, returns True and experiment_address; otherwise
+        returns False and experiment_address.
+        """
+        experiment_address = self.dns_resolve(experiment_hostname)
+        if not experiment_address:
+            log.debug("dns_resolve() for %s failed" % experiment_hostname)
+            return None, experiment_address
+
+        if len(set(experiment_address) & set([control_address])) > 0:
+            return True, experiment_address
+        else:
+            log.msg("DNS comparison of control '%s' does not" % control_address)
+            log.msg("match experiment response '%s'" % experiment_address)
+            return False, experiment_address
+
+    def get_auth_nameservers(self, hostname):
+        """
+        Many CPs set a nameserver to be used. Let's query that
+        nameserver for the authoritative nameservers of hostname.
+
+        The equivalent of:
+        $ dig +short NS ooni.nu
+        """
+        if not resolver:
+            log.msg("dnspython not installed.")
+            log.msg("Cannot perform test.")
+            return []
+
+        res = resolver.Resolver()
+        answer = res.query(hostname, 'NS')
+        auth_nameservers = []
+        for auth in answer:
+            auth_nameservers.append(auth.to_text())
+        return auth_nameservers
+
+    def hostname_to_0x20(self, hostname):
+        """
+        MaKEs yOur HOsTnaME lOoK LiKE THis.
+
+        For more information, see:
+        D. Dagon, et. al. "Increased DNS Forgery Resistance
+        Through 0x20-Bit Encoding". Proc. CSS, 2008.
+        """
+        hostname_0x20 = ''
+        for char in hostname:
+            l33t = random.choice(['caps', 'nocaps'])
+            if l33t == 'caps':
+                hostname_0x20 += char.capitalize()
+            else:
+                hostname_0x20 += char.lower()
+        return hostname_0x20
+
+    def check_0x20_to_auth_ns(self, hostname, sample_size=None):
+        """
+        Resolve a 0x20 DNS request for hostname over hostname's
+        authoritative nameserver(s), and check to make sure that
+        the capitalization in the 0x20 request matches that of the
+        response. Also, check the serial numbers of the SOA (Start
+        of Authority) records on the authoritative nameservers to
+        make sure that they match.
+
+        If sample_size is given, a random sample equal to that number
+        of authoritative nameservers will be queried; default is 5.
+        """
+        log.msg("")
+        log.msg("Testing random capitalization of DNS queries...")
+        log.msg("Testing that Start of Authority serial numbers match...")
+
+        auth_nameservers = self.get_auth_nameservers(hostname)
+
+        if sample_size is None:
+            sample_size = 5
+            resolved_auth_ns = random.sample(self.dns_resolve(auth_nameservers),
+                                             sample_size)
+
+        querynames = []
+        answernames = []
+        serials = []
+
+        # Even when gevent monkey patching is on, the requests here
+        # are sent without being 0x20'd, so we need to 0x20 them.
+        hostname = self.hostname_to_0x20(hostname)
+
+        for auth_ns in resolved_auth_ns:
+            res = resolver.Resolver(configure=False)
+            res.nameservers = [auth_ns]
+            try:
+                answer = res.query(hostname, 'SOA')
+            except resolver.Timeout:
+                continue
+            querynames.append(answer.qname.to_text())
+            answernames.append(answer.rrset.name.to_text())
+            for soa in answer:
+                serials.append(str(soa.serial))
+
+        if len(set(querynames).intersection(answernames)) == 1:
+            log.msg("Capitalization in DNS queries and responses match.")
+            name_match = True
+        else:
+            log.msg("The random capitalization '%s' used in" % hostname)
+            log.msg("DNS queries to that hostname's authoritative")
+            log.msg("nameservers does not match the capitalization in")
+            log.msg("the response.")
+            name_match = False
+
+        if len(set(serials)) == 1:
+            log.msg("Start of Authority serial numbers all match.")
+            serial_match = True
+        else:
+            log.msg("Some SOA serial numbers did not match the rest!")
+            serial_match = False
+
+        ret = name_match, serial_match, querynames, answernames, serials
+
+        if name_match and serial_match:
+            log.msg("Your DNS queries do not appear to be tampered.")
+            return ret
+        elif name_match or serial_match:
+            log.msg("Something is tampering with your DNS queries.")
+            return ret
+        elif not name_match and not serial_match:
+            log.msg("Your DNS queries are definitely being tampered with.")
+            return ret
+
+    def get_random_url_safe_string(self, length):
+        """
+        Returns a random url-safe string of specified length, where
+        0 < length <= 256. The returned string will always start with
+        an alphabetic character.
+        """
+        if (length <= 0):
+            length = 1
+        elif (length > 256):
+            length = 256
+
+        random_ascii = base64.urlsafe_b64encode(os.urandom(int(length)))
+
+        while not random_ascii[:1].isalpha():
+            random_ascii = base64.urlsafe_b64encode(os.urandom(int(length)))
+
+        three_quarters = int((len(random_ascii)) * (3.0/4.0))
+        random_string = random_ascii[:three_quarters]
+        return random_string
+
+    def get_random_hostname(self, length=None):
+        """
+        Returns a random hostname with SLD of specified length. If
+        length is unspecified, length=32 is used.
+
+        These *should* all resolve to NXDOMAIN. If they actually
+        resolve to a box that isn't part of a captive portal that
+        would be rather interesting.
+        """
+        if length is None:
+            length = 32
+
+        random_sld = self.get_random_url_safe_string(length)
+
+        # if it doesn't start with a letter, chuck it.
+        while not random_sld[:1].isalpha():
+            random_sld = self.get_random_url_safe_string(length)
+
+        tld_list = ['.com', '.net', '.org', '.info', '.test', '.invalid']
+        random_tld = urllib2.random.choice(tld_list)
+        random_hostname = random_sld + random_tld
+        return random_hostname
+
+    def compare_random_hostnames(self, hostname_count=None, hostname_length=None):
+        """
+        Get hostname_count number of random hostnames with SLD length
+        of hostname_length, and then attempt DNS resolution. If no
+        arguments are given, default to three hostnames of 32 bytes
+        each. These random hostnames *should* resolve to NXDOMAIN,
+        except in the case where a user is presented with a captive
+        portal and remains unauthenticated, in which case the captive
+        portal may return the address of the authentication page.
+
+        If the cardinality of the intersection of the set of resolved
+        random hostnames and the single element control set
+        (['NXDOMAIN']) are equal to one, then DNS properly resolved.
+
+        Returns true if only NXDOMAINs were returned, otherwise returns
+        False with the relative complement of the control set in the
+        response set.
+        """
+        if hostname_count is None:
+            hostname_count = 3
+
+        log.msg("Generating random hostnames...")
+        log.msg("Resolving DNS for %d random hostnames..." % hostname_count)
+
+        control = ['NXDOMAIN']
+        responses = []
+
+        for x in range(hostname_count):
+            random_hostname = self.get_random_hostname(hostname_length)
+            response_match, response_address = self.dns_resolve_match(random_hostname,
+                                                                      control[0])
+            for address in response_address:
+                if response_match is False:
+                    log.msg("Strangely, DNS resolution of the random hostname")
+                    log.msg("%s actually points to %s"
+                             % (random_hostname, response_address))
+                    responses = responses + [address]
+                else:
+                    responses = responses + [address]
+
+        intersection = set(responses) & set(control)
+        relative_complement = set(responses) - set(control)
+        r = set(responses)
+
+        if len(intersection) == 1:
+            log.msg("All %d random hostnames properly resolved to NXDOMAIN."
+                     % hostname_count)
+            return True, relative_complement
+        elif (len(intersection) == 1) and (len(r) > 1):
+            log.msg("Something odd happened. Some random hostnames correctly")
+            log.msg("resolved to NXDOMAIN, but several others resolved to")
+            log.msg("to the following addresses: %s" % relative_complement)
+            return False, relative_complement
+        elif (len(intersection) == 0) and (len(r) == 1):
+            log.msg("All random hostnames resolved to the IP address ")
+            log.msg("'%s', which is indicative of a captive portal." % r)
+            return False, relative_complement
+        else:
+            log.debug("Apparently, pigs are flying on your network, 'cause a")
+            log.debug("bunch of hostnames made from 32-byte random strings")
+            log.debug("just magically resolved to a bunch of random addresses.")
+            log.debug("That is definitely highly improbable. In fact, my napkin")
+            log.debug("tells me that the probability of just one of those")
+            log.debug("hostnames resolving to an address is 1.68e-59, making")
+            log.debug("it nearly twice as unlikely as an MD5 hash collision.")
+            log.debug("Either someone is seriously messing with your network,")
+            log.debug("or else you are witnessing the impossible. %s" % r)
+            return False, relative_complement
+
+    def google_dns_cp_test(self):
+        """
+        Google Chrome resolves three 10-byte random hostnames.
+        """
+        subtest = "Google Chrome DNS-based"
+        log.msg("Running the Google Chrome DNS-based captive portal test...")
+
+        gmatch, google_dns_result = self.compare_random_hostnames(3, 10)
+
+        if gmatch:
+            log.msg("Google Chrome DNS-based captive portal test did not")
+            log.msg("detect a captive portal.")
+            return google_dns_result
+        else:
+            log.msg("Google Chrome DNS-based captive portal test believes")
+            log.msg("you are in a captive portal, or else something very")
+            log.msg("odd is happening with your DNS.")
+            return google_dns_result
+
+    def ms_dns_cp_test(self):
+        """
+        Microsoft "phones home" to a server which will always resolve
+        to the same address.
+        """
+        subtest = "Microsoft NCSI DNS-based"
+
+        log.msg("")
+        log.msg("Running the Microsoft NCSI DNS-based captive portal")
+        log.msg("test...")
+
+        msmatch, ms_dns_result = self.dns_resolve_match("dns.msftncsi.com",
+                                                        "131.107.255.255")
+        if msmatch:
+            log.msg("Microsoft NCSI DNS-based captive portal test did not")
+            log.msg("detect a captive portal.")
+            return ms_dns_result
+        else:
+            log.msg("Microsoft NCSI DNS-based captive portal test ")
+            log.msg("believes you are in a captive portal.")
+            return ms_dns_result
+
+    def run_vendor_dns_tests(self):
+        """
+        Run the vendor DNS tests.
+        """
+        report = {}
+        report['google_dns_cp'] = self.google_dns_cp_test()
+        report['ms_dns_cp'] = self.ms_dns_cp_test()
+
+        return report
+
+    def run_vendor_tests(self, *a, **kw):
+        """
+        These are several vendor tests used to detect the presence of
+        a captive portal. Each test compares HTTP status code and
+        content to the control results and has its own User-Agent
+        string, in order to emulate the test as it would occur on the
+        device it was intended for. Vendor tests are defined in the
+        format:
+        [exp_url, ctrl_result, ctrl_code, ua, test_name]
+        """
+
+        vendor_tests = [['http://www.apple.com/library/test/success.html',
+                         'Success',
+                         '200',
+                         'Mozilla/5.0 (iPhone; U; CPU like Mac OS X; en) AppleWebKit/420+ (KHTML, like Gecko) Version/3.0 Mobile/1A543a Safari/419.3',
+                         'Apple HTTP Captive Portal'],
+                        ['http://tools.ietf.org/html/draft-nottingham-http-portal-02',
+                         '428 Network Authentication Required',
+                         '428',
+                         'Mozilla/5.0 (Windows NT 6.1; rv:5.0) Gecko/20100101 Firefox/5.0',
+                         'W3 Captive Portal'],
+                        ['http://www.msftncsi.com/ncsi.txt',
+                         'Microsoft NCSI',
+                         '200',
+                         'Microsoft NCSI',
+                         'MS HTTP Captive Portal',]]
+
+        # Aliases for the content/status comparison helpers defined on
+        # this test class.
+        cm = self.http_content_match_fuzzy_opt
+        sm = self.http_status_code_match
+        snm = self.http_status_code_no_match
+
+        def compare_content(status_func, fuzzy, experiment_url, control_result,
+                            control_code, headers, test_name):
+            """
+            Fetch experiment_url with the vendor's headers and compare
+            the body (optionally fuzzily, via cm) and the status code
+            (via status_func) against the control values. Returns True
+            when no captive portal was detected, False otherwise.
+            """
+            log.msg("")
+            log.msg("Running the %s test..." % test_name)
+
+            content_match, experiment_code, experiment_headers = cm(experiment_url,
+                                                                    control_result,
+                                                                    headers, fuzzy)
+            status_match = status_func(experiment_code, control_code)
+
+            if status_match and content_match:
+                log.msg("The %s test was unable to detect" % test_name)
+                log.msg("a captive portal.")
+                return True
+            else:
+                log.msg("The %s test shows that your network" % test_name)
+                log.msg("is filtered.")
+                return False
+
+        result = []
+        for vt in vendor_tests:
+            report = {}
+            report['vt'] = vt
+
+            experiment_url = vt[0]
+            control_result = vt[1]
+            control_code = vt[2]
+            headers = {'User-Agent': vt[3]}
+            test_name = vt[4]
+
+            args = (experiment_url, control_result, control_code, headers, test_name)
+
+            # MS uses exact content matching; Apple and W3 use fuzzy
+            # content matching. W3 passes the status-code *no-match*
+            # comparator (snm) instead of sm.
+            if test_name == "MS HTTP Captive Portal":
+                report['result'] = compare_content(sm, False, *args)
+
+            elif test_name == "Apple HTTP Captive Portal":
+                report['result'] = compare_content(sm, True, *args)
+
+            elif test_name == "W3 Captive Portal":
+                report['result'] = compare_content(snm, True, *args)
+
+            else:
+                log.err("Ooni is trying to run an undefined CP vendor test.")
+            result.append(report)
+        return result
+
+    def control(self, experiment_result, args):
+        """
+        Compares the content and status code of the HTTP response for
+        experiment_url with the control_result and control_code
+        respectively. If the status codes match, but the experimental
+        content and control_result do not match, fuzzy matching is enabled
+        to determine if the control_result is at least included somewhere
+        in the experimental content. Returns True if matches are found,
+        and False if otherwise.
+        """
+        # NOTE(review): despite the docstring, this method records the
+        # outcome in self.report['result'] and returns None.
+        # XXX put this back to being parametrized
+        #experiment_url = self.local_options['experiment-url']
+        experiment_url = 'http://google.com/'
+        control_result = 'XX'
+        # NOTE(review): control_code is an int here while run_vendor_tests
+        # passes status codes as strings ('200'); confirm that
+        # http_status_code_match treats both consistently.
+        control_code = 200
+        ua = self.local_options['user-agent']
+
+        # Aliases for the comparison helpers; snm and ua are currently
+        # unused in this method.
+        cm = self.http_content_match_fuzzy_opt
+        sm = self.http_status_code_match
+        snm = self.http_status_code_no_match
+
+        log.msg("Running test for '%s'..." % experiment_url)
+        content_match, experiment_code, experiment_headers = cm(experiment_url,
+                                                                control_result)
+        status_match = sm(experiment_code, control_code)
+        if status_match and content_match:
+            log.msg("The test for '%s'" % experiment_url)
+            log.msg("was unable to detect a captive portal.")
+
+            self.report['result'] = True
+
+        elif status_match and not content_match:
+            # Status matched but content did not: retry with fuzzy
+            # matching before declaring tampering.
+            log.msg("Retrying '%s' with fuzzy match enabled."
+                     % experiment_url)
+            fuzzy_match, experiment_code, experiment_headers = cm(experiment_url,
+                                                                  control_result,
+                                                                  fuzzy=True)
+            if fuzzy_match:
+                self.report['result'] = True
+            else:
+                log.msg("Found modified content on '%s'," % experiment_url)
+                log.msg("which could indicate a captive portal.")
+
+                self.report['result'] = False
+        else:
+            log.msg("The content comparison test for ")
+            log.msg("'%s'" % experiment_url)
+            log.msg("shows that your HTTP traffic is filtered.")
+
+            self.report['result'] = False
+
+    @defer.inlineCallbacks
+    def test_captive_portal(self):
+        """
+        Runs the CaptivePortal(Test).
+
+        CONFIG OPTIONS
+        --------------
+
+        If "do_captive_portal_vendor_tests" is set to "true", then vendor
+        specific captive portal HTTP-based tests will be run.
+
+        If "do_captive_portal_dns_tests" is set to "true", then vendor
+        specific captive portal DNS-based tests will be run.
+
+        If "check_dns_requests" is set to "true", then Ooni-probe will
+        attempt to check that your DNS requests are not being tampered with
+        by a captive portal.
+
+        If "captive_portal" = "yourfilename.txt", then user-specified tests
+        will be run.
+
+        Any combination of the above tests can be run.
+        """
+        # NOTE(review): none of the config options described above are
+        # consulted here -- all three groups of checks run
+        # unconditionally; confirm whether gating was intended.
+        # Each group runs in a worker thread (deferToThread) because the
+        # helpers use blocking network I/O.
+        log.msg("")
+        log.msg("Running vendor tests...")
+        self.report['vendor_tests'] = yield threads.deferToThread(self.run_vendor_tests)
+
+        log.msg("")
+        log.msg("Running vendor DNS-based tests...")
+        self.report['vendor_dns_tests'] = yield threads.deferToThread(self.run_vendor_dns_tests)
+
+        log.msg("")
+        log.msg("Checking that DNS requests are not being tampered...")
+        self.report['check0x20'] = yield threads.deferToThread(self.check_0x20_to_auth_ns, 'ooni.nu')
+
+        log.msg("")
+        log.msg("Captive portal test finished!")
+
diff --git a/data/nettests/manipulation/daphne.py b/data/nettests/manipulation/daphne.py
new file mode 100644
index 0000000..09279fa
--- /dev/null
+++ b/data/nettests/manipulation/daphne.py
@@ -0,0 +1,119 @@
+# -*- encoding: utf-8 -*-
+from twisted.python import usage
+from twisted.internet import protocol, endpoints, reactor
+
+from ooni import nettest
+from ooni.kit import daphn3
+from ooni.utils import log
+
+class Daphn3ClientProtocol(daphn3.Daphn3Protocol):
+    def nextStep(self):
+        log.debug("Moving on to next step in the state walk")
+        self.current_data_received = 0
+        if self.current_step >= (len(self.steps) - 1):
+            log.msg("Reached the end of the state machine")
+            log.msg("Censorship fingerpint bisected!")
+            step_idx, mutation_idx = self.factory.mutation
+            log.msg("step_idx: %s | mutation_id: %s" % (step_idx, mutation_idx))
+            #self.transport.loseConnection()
+            if self.report:
+                self.report['mutation_idx'] = mutation_idx
+                self.report['step_idx'] = step_idx
+            self.d.callback(None)
+            return
+        else:
+            self.current_step += 1
+        if self._current_step_role() == self.role:
+            # We need to send more data because we are again responsible for
+            # doing so.
+            self.sendPayload()
+
+
+class Daphn3ClientFactory(protocol.ClientFactory):
+    protocol = daphn3.Daphn3Protocol
+    mutation = [0,0]
+    steps = None
+
+    def buildProtocol(self, addr):
+        p = self.protocol()
+        p.steps = self.steps
+        p.factory = self
+        return p
+
+    def startedConnecting(self, connector):
+        log.msg("Started connecting %s" % connector)
+
+    def clientConnectionFailed(self, reason, connector):
+        log.err("We failed connecting the the OONIB")
+        log.err("Cannot perform test. Perhaps it got blocked?")
+        log.err("Please report this to tor-assistants@xxxxxxxxxxxxxx")
+
+    def clientConnectionLost(self, reason, connector):
+        log.err("Daphn3 client connection lost")
+        print reason
+
+class daphn3Args(usage.Options):
+    # Command line options for the daphn3 test: the target address and
+    # two flags selecting the input file format (pcap or YAML).
+    optParameters = [
+                     ['host', 'h', '127.0.0.1', 'Target Hostname'],
+                     ['port', 'p', 57003, 'Target port number']]
+
+    optFlags = [['pcap', 'c', 'Specify that the input file is a pcap file'],
+                ['yaml', 'y', 'Specify that the input file is a YAML file (default)']]
+
+class daphn3Test(nettest.NetTestCase):
+
+    name = "Daphn3"
+    usageOptions = daphn3Args
+    inputFile = ['file', 'f', None, 
+            'Specify the pcap or YAML file to be used as input to the test']
+
+    #requiredOptions = ['file']
+
+    steps = None
+
+    def inputProcessor(self, filename):
+        """
+        step_idx is the step in the packet exchange
+        ex.
+        [.X.] are packets sent by a client or a server
+
+            client:  [.1.]        [.3.] [.4.]
+            server:         [.2.]             [.5.]
+
+        mutation_idx: is the sub index of the packet as in the byte of the
+        packet at the step_idx that is to be mutated
+
+        """
+        if self.localOptions['pcap']:
+            daphn3Steps = daphn3.read_pcap(filename)
+        else:
+            daphn3Steps = daphn3.read_yaml(filename)
+        log.debug("Loaded these steps %s" % daphn3Steps)
+        yield daphn3Steps
+
+    def test_daphn3(self):
+        host = self.localOptions['host']
+        port = int(self.localOptions['port'])
+
+        def failure(failure):
+            log.msg("Failed to connect")
+            self.report['censored'] = True
+            self.report['mutation'] = 0
+            raise Exception("Error in connection, perhaps the backend is censored")
+            return
+
+        def success(protocol):
+            log.msg("Successfully connected")
+            protocol.sendPayload()
+            return protocol.d
+
+        log.msg("Connecting to %s:%s" % (host, port))
+        endpoint = endpoints.TCP4ClientEndpoint(reactor, host, port)
+        daphn3_factory = Daphn3ClientFactory()
+        daphn3_factory.steps = self.input
+        daphn3_factory.report = self.report
+        d = endpoint.connect(daphn3_factory)
+        d.addErrback(failure)
+        d.addCallback(success)
+        return d
+
diff --git a/data/nettests/manipulation/dnsspoof.py b/data/nettests/manipulation/dnsspoof.py
new file mode 100644
index 0000000..5c50c2f
--- /dev/null
+++ b/data/nettests/manipulation/dnsspoof.py
@@ -0,0 +1,69 @@
+from twisted.internet import defer
+from twisted.python import usage
+
+from scapy.all import IP, UDP, DNS, DNSQR
+
+from ooni.templates import scapyt
+from ooni.utils import log
+
+class UsageOptions(usage.Options):
+    # Command line options for the DNS spoof test: the resolver under
+    # test, the hostname of a censored site, and a known-good control
+    # resolver. NOTE(review): the short option 'h' may clash with
+    # twisted.usage's built-in help handling -- confirm.
+    optParameters = [['resolver', 'r', None,
+                    'Specify the resolver that should be used for DNS queries (ip:port)'],
+                    ['hostname', 'h', None,
+                        'Specify the hostname of a censored site'],
+                    ['backend', 'b', '8.8.8.8:53',
+                        'Specify the IP address of a good DNS resolver (ip:port)']
+                    ]
+
+
+class DNSSpoof(scapyt.ScapyTest):
+    name = "DNS Spoof"
+    timeout = 2
+
+    usageOptions = UsageOptions
+
+    requiredOptions = ['hostname', 'resolver']
+
+    def setUp(self):
+        self.resolverAddr, self.resolverPort = self.localOptions['resolver'].split(':')
+        self.resolverPort = int(self.resolverPort)
+
+        self.controlResolverAddr, self.controlResolverPort = self.localOptions['backend'].split(':')
+        self.controlResolverPort = int(self.controlResolverPort)
+
+        self.hostname = self.localOptions['hostname']
+
+    def postProcessor(self, report):
+        """
+        This is not tested, but the concept is that if the two responses
+        match up then spoofing is occuring.
+        """
+        try:
+            test_answer = report['test_a_lookup']['answered_packets'][0][1]
+            control_answer = report['test_control_a_lookup']['answered_packets'][0][1]
+        except IndexError:
+            self.report['spoofing'] = 'no_answer'
+            return
+
+        if test_answer[UDP] == control_answer[UDP]:
+                self.report['spoofing'] = True
+        else:
+            self.report['spoofing'] = False
+        return
+
+    @defer.inlineCallbacks
+    def test_a_lookup(self):
+        question = IP(dst=self.resolverAddr)/UDP()/DNS(rd=1,
+                qd=DNSQR(qtype="A", qclass="IN", qname=self.hostname))
+        log.msg("Performing query to %s with %s:%s" % (self.hostname, self.resolverAddr, self.resolverPort))
+        yield self.sr1(question)
+
+    @defer.inlineCallbacks
+    def test_control_a_lookup(self):
+        question = IP(dst=self.controlResolverAddr)/UDP()/DNS(rd=1,
+                qd=DNSQR(qtype="A", qclass="IN", qname=self.hostname))
+        log.msg("Performing query to %s with %s:%s" % (self.hostname,
+            self.controlResolverAddr, self.controlResolverPort))
+        yield self.sr1(question)
+
+
diff --git a/data/nettests/manipulation/http_header_field_manipulation.py b/data/nettests/manipulation/http_header_field_manipulation.py
new file mode 100644
index 0000000..509f4ef
--- /dev/null
+++ b/data/nettests/manipulation/http_header_field_manipulation.py
@@ -0,0 +1,189 @@
+# -*- encoding: utf-8 -*-
+#
+# :authors: Arturo Filastò
+# :licence: see LICENSE
+
+import random
+import json
+import yaml
+
+from twisted.python import usage
+
+from ooni.utils import log, net, randomStr
+from ooni.templates import httpt
+from ooni.utils.txagentwithsocks import TrueHeaders
+
+def random_capitalization(string):
+    output = ""
+    original_string = string
+    string = string.swapcase()
+    for i in range(len(string)):
+        if random.randint(0, 1):
+            output += string[i].swapcase()
+        else:
+            output += string[i]
+    if original_string == output:
+        return random_capitalization(output)
+    else:
+        return output
+
+class UsageOptions(usage.Options):
+    optParameters = [
+            ['backend', 'b', 'http://127.0.0.1:57001', 
+                'URL of the backend to use for sending the requests'],
+            ['headers', 'h', None,
+                'Specify a yaml formatted file from which to read the request headers to send']
+            ]
+
+class HTTPHeaderFieldManipulation(httpt.HTTPTest):
+    """
+    It performes HTTP requests with request headers that vary capitalization
+    towards a backend. If the headers reported by the server differ from
+    the ones we sent, then we have detected tampering.
+    """
+    name = "HTTP Header Field Manipulation"
+    author = "Arturo Filastò"
+    version = "0.1.3"
+
+    randomizeUA = False
+    usageOptions = UsageOptions
+
+    requiredOptions = ['backend']
+
+    def get_headers(self):
+        headers = {}
+        if self.localOptions['headers']:
+            try:
+                f = open(self.localOptions['headers'])
+            except IOError:
+                raise Exception("Specified input file does not exist")
+            content = ''.join(f.readlines())
+            f.close()
+            headers = yaml.safe_load(content)
+            return headers
+        else:
+            # XXX generate these from a random choice taken from whatheaders.com
+            # http://s3.amazonaws.com/data.whatheaders.com/whatheaders-latest.xml.zip
+            headers = {"User-Agent": [random.choice(net.userAgents)],
+                "Accept": ["text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8"],
+                "Accept-Encoding": ["gzip,deflate,sdch"],
+                "Accept-Language": ["en-US,en;q=0.8"],
+                "Accept-Charset": ["ISO-8859-1,utf-8;q=0.7,*;q=0.3"],
+                "Host": [randomStr(15)+'.com']
+            }
+            return headers
+
+    def get_random_caps_headers(self):
+        headers = {}
+        normal_headers = self.get_headers()
+        for k, v in normal_headers.items():
+            new_key = random_capitalization(k)
+            headers[new_key] = v
+        return headers
+
+    def processInputs(self):
+        if self.localOptions['backend']:
+            self.url = self.localOptions['backend']
+        else:
+            raise Exception("No backend specified")
+
+    def processResponseBody(self, data):
+        self.check_for_tampering(data)
+
+    def check_for_tampering(self, data):
+        """
+        Here we do checks to verify if the request we made has been tampered
+        with. We have 3 categories of tampering:
+
+        *  **total** when the response is not a json object and therefore we were not
+        able to reach the ooniprobe test backend
+
+        *  **request_line_capitalization** when the HTTP Request line (e.x. GET /
+        HTTP/1.1) does not match the capitalization we set.
+
+        *  **header_field_number** when the number of headers we sent does not match
+        with the ones the backend received
+
+        *  **header_name_capitalization** when the header field names do not match
+        those that we sent.
+
+        *  **header_field_value** when the header field value does not match with the
+        one we transmitted.
+        """
+        log.msg("Checking for tampering on %s" % self.url)
+
+        self.report['tampering'] = {
+            'total': False,
+            'request_line_capitalization': False,
+            'header_name_capitalization': False,
+            'header_field_value': False,
+            'header_field_number': False
+        }
+        try:
+            response = json.loads(data)
+        except ValueError:
+            self.report['tampering']['total'] = True
+            return
+
+        request_request_line = "%s / HTTP/1.1" % self.request_method
+
+        try:
+            response_request_line = response['request_line']
+            response_headers_dict = response['headers_dict']
+        except KeyError:
+            self.report['tampering']['total'] = True
+            return
+
+        if request_request_line != response_request_line:
+            self.report['tampering']['request_line_capitalization'] = True
+
+        request_headers = TrueHeaders(self.request_headers)
+        diff = request_headers.getDiff(TrueHeaders(response_headers_dict),
+                ignore=['Connection'])
+        if diff:
+            self.report['tampering']['header_field_name'] = True
+        else:
+            self.report['tampering']['header_field_name'] = False
+        self.report['tampering']['header_name_diff'] = list(diff)
+        log.msg("    total: %(total)s" % self.report['tampering'])
+        log.msg("    request_line_capitalization: %(request_line_capitalization)s" % self.report['tampering'])
+        log.msg("    header_name_capitalization: %(header_name_capitalization)s" % self.report['tampering'])
+        log.msg("    header_field_value: %(header_field_value)s" % self.report['tampering'])
+        log.msg("    header_field_number: %(header_field_number)s" % self.report['tampering'])
+
+    def test_get(self):
+        self.request_method = "GET"
+        self.request_headers = self.get_random_caps_headers()
+        return self.doRequest(self.url, self.request_method,
+                headers=self.request_headers)
+
+    def test_get_random_capitalization(self):
+        self.request_method = random_capitalization("GET")
+        self.request_headers = self.get_random_caps_headers()
+        return self.doRequest(self.url, self.request_method,
+                headers=self.request_headers)
+
+    def test_post(self):
+        self.request_method = "POST"
+        self.request_headers = self.get_headers()
+        return self.doRequest(self.url, self.request_method,
+                headers=self.request_headers)
+
+    def test_post_random_capitalization(self):
+        self.request_method = random_capitalization("POST")
+        self.request_headers = self.get_random_caps_headers()
+        return self.doRequest(self.url, self.request_method,
+                headers=self.request_headers)
+
+    def test_put(self):
+        self.request_method = "PUT"
+        self.request_headers = self.get_headers()
+        return self.doRequest(self.url, self.request_method,
+                headers=self.request_headers)
+
+    def test_put_random_capitalization(self):
+        self.request_method = random_capitalization("PUT")
+        self.request_headers = self.get_random_caps_headers()
+        return self.doRequest(self.url, self.request_method,
+                headers=self.request_headers)
+
diff --git a/data/nettests/manipulation/http_host.py b/data/nettests/manipulation/http_host.py
new file mode 100644
index 0000000..d95d836
--- /dev/null
+++ b/data/nettests/manipulation/http_host.py
@@ -0,0 +1,141 @@
+# -*- encoding: utf-8 -*-
+#
+# HTTP Host Test
+# **************
+#
+# :authors: Arturo Filastò
+# :licence: see LICENSE
+
+import json
+from twisted.python import usage
+
+from ooni.utils import randomStr, randomSTR
+
+from ooni.utils import log
+from ooni.templates import httpt
+
+class UsageOptions(usage.Options):
+    optParameters = [['backend', 'b', 'http://127.0.0.1:57001',
+                      'URL of the test backend to use. Should be \
+                              listening on port 80 and be a \
+                              HTTPReturnJSONHeadersHelper'],
+                     ['content', 'c', None, 'The file to read \
+                            from containing the content of a block page']]
+
+class HTTPHost(httpt.HTTPTest):
+    """
+    This test is aimed at detecting the presence of a transparent HTTP proxy
+    and enumerating the sites that are being censored by it.
+
+    It places inside of the Host header field the hostname of the site that is
+    to be tested for censorship and then determines if the probe is behind a
+    transparent HTTP proxy (because the response from the backend server does
+    not match) and if the site is censored, by checking if the page that it
+    got back matches the input block page.
+    """
+    name = "HTTP Host"
+    author = "Arturo Filastò"
+    version = "0.2.3"
+
+    randomizeUA = False
+    usageOptions = UsageOptions
+
+    inputFile = ['file', 'f', None,
+            'List of hostnames to test for censorship']
+
+    requiredOptions = ['backend']
+
+    def test_filtering_prepend_newline_to_method(self):
+        headers = {}
+        headers["Host"] = [self.input]
+        return self.doRequest(self.localOptions['backend'], method="\nGET",
+                headers=headers)
+
+    def test_filtering_add_tab_to_host(self):
+        headers = {}
+        headers["Host"] = [self.input + '\t']
+        return self.doRequest(self.localOptions['backend'],
+                headers=headers)
+
+    def test_filtering_of_subdomain(self):
+        headers = {}
+        headers["Host"] = [randomStr(10) + '.' + self.input]
+        return self.doRequest(self.localOptions['backend'],
+                headers=headers)
+
+    def test_filtering_via_fuzzy_matching(self):
+        headers = {}
+        headers["Host"] = [randomStr(10) + self.input + randomStr(10)]
+        return self.doRequest(self.localOptions['backend'],
+                headers=headers)
+
+    def test_send_host_header(self):
+        """
+        Stuffs the HTTP Host header field with the site to be tested for
+        censorship and does an HTTP request of this kind to our backend.
+
+        We randomize the HTTP User Agent headers.
+        """
+        headers = {}
+        headers["Host"] = [self.input]
+        return self.doRequest(self.localOptions['backend'],
+                headers=headers)
+
+    def check_for_censorship(self, body):
+        """
+        If we have specified what a censorship page looks like here we will
+        check if the page we are looking at matches it.
+
+        XXX this is not tested, though it is basically what was used to detect
+        censorship in the palestine case.
+        """
+        if self.localOptions['content']:
+            self.report['censored'] = True
+            censorship_page = open(self.localOptions['content'])
+            response_page = iter(body.split("\n"))
+
+            for censorship_line in censorship_page.xreadlines():
+                response_line = response_page.next()
+                if response_line != censorship_line:
+                    self.report['censored'] = False
+                    break
+
+            censorship_page.close()
+        else:
+            self.report['censored'] = None
+
+    def processResponseBody(self, body):
+        """
+        XXX this is to be filled in with either a domclass based classified or
+        with a rule that will allow to detect that the body of the result is
+        that of a censored site.
+        """
+        # If we don't see a json array we know that something is wrong for
+        # sure
+        if not body.startswith("{"):
+            log.msg("This does not appear to be JSON")
+            self.report['transparent_http_proxy'] = True
+            self.check_for_censorship(body)
+            return
+        try:
+            content = json.loads(body)
+        except:
+            log.msg("The json does not parse, this is not what we expected")
+            self.report['transparent_http_proxy'] = True
+            self.check_for_censorship(body)
+            return
+
+        # We base the determination of the presence of a transparent HTTP
+        # proxy on the basis of the response containing the json that is to be
+        # returned by a HTTP Request Test Helper
+        if 'request_headers' in content and \
+                'request_line' in content and \
+                'headers_dict' in content:
+            log.msg("Found the keys I expected in %s" % content)
+            self.report['transparent_http_proxy'] = False
+            self.report['censored'] = False
+        else:
+            log.msg("Did not find the keys I expected in %s" % content)
+            self.report['transparent_http_proxy'] = True
+            self.check_for_censorship(body)
+
diff --git a/data/nettests/manipulation/http_invalid_request_line.py b/data/nettests/manipulation/http_invalid_request_line.py
new file mode 100644
index 0000000..2482282
--- /dev/null
+++ b/data/nettests/manipulation/http_invalid_request_line.py
@@ -0,0 +1,106 @@
+# -*- encoding: utf-8 -*-
+from twisted.python import usage
+
+from ooni.utils import log
+from ooni.utils import randomStr, randomSTR
+from ooni.templates import tcpt
+
+class UsageOptions(usage.Options):
+    optParameters = [['backend', 'b', '127.0.0.1',
+                        'The OONI backend that runs a TCP echo server'],
+                    ['backendport', 'p', 80, 'Specify the port that the TCP echo server is running (should only be set for debugging)']]
+
+class HTTPInvalidRequestLine(tcpt.TCPTest):
+    """
+    The goal of this test is to do some very basic and not very noisy fuzzing
+    on the HTTP request line. We generate a series of requests that are not
+    valid HTTP requests.
+
+    Unless elsewhere stated 'Xx'*N refers to N*2 random upper or lowercase
+    ascii letters or numbers ('XxXx' will be 4).
+    """
+    name = "HTTP Invalid Request Line"
+    version = "0.1.4"
+    authors = "Arturo Filastò"
+
+    usageOptions = UsageOptions
+    requiredOptions = ['backend']
+
+    def setUp(self):
+        self.port = int(self.localOptions['backendport'])
+        self.address = self.localOptions['backend']
+
+    def check_for_manipulation(self, response, payload):
+        log.debug("Checking if %s == %s" % (response, payload))
+        if response != payload:
+            self.report['tampering'] = True
+        else:
+            self.report['tampering'] = False
+
+    def test_random_invalid_method(self):
+        """
+        We test sending data to a TCP echo server listening on port 80, if what
+        we get back is not what we have sent then there is tampering going on.
+        This is for example what squid will return when performing such
+        request:
+
+            HTTP/1.0 400 Bad Request
+            Server: squid/2.6.STABLE21
+            Date: Sat, 23 Jul 2011 02:22:44 GMT
+            Content-Type: text/html
+            Content-Length: 1178
+            Expires: Sat, 23 Jul 2011 02:22:44 GMT
+            X-Squid-Error: ERR_INVALID_REQ 0
+            X-Cache: MISS from cache_server
+            X-Cache-Lookup: NONE from cache_server:3128
+            Via: 1.0 cache_server:3128 (squid/2.6.STABLE21)
+            Proxy-Connection: close
+
+        """
+        payload = randomSTR(4) + " / HTTP/1.1\n\r"
+
+        d = self.sendPayload(payload)
+        d.addCallback(self.check_for_manipulation, payload)
+        return d
+
+    def test_random_invalid_field_count(self):
+        """
+        This generates a request that looks like this:
+
+        XxXxX XxXxX XxXxX XxXxX
+
+        This may trigger some bugs in the HTTP parsers of transparent HTTP
+        proxies.
+        """
+        payload = ' '.join(randomStr(5) for x in range(4))
+        payload += "\n\r"
+
+        d = self.sendPayload(payload)
+        d.addCallback(self.check_for_manipulation, payload)
+        return d
+
+    def test_random_big_request_method(self):
+        """
+        This generates a request that looks like this:
+
+        Xx*512 / HTTP/1.1
+        """
+        payload = randomStr(1024) + ' / HTTP/1.1\n\r'
+
+        d = self.sendPayload(payload)
+        d.addCallback(self.check_for_manipulation, payload)
+        return d
+
+    def test_random_invalid_version_number(self):
+        """
+        This generates a request that looks like this:
+
+        GET / HTTP/XxX
+        """
+        payload = 'GET / HTTP/' + randomStr(3)
+        payload += '\n\r'
+
+        d = self.sendPayload(payload)
+        d.addCallback(self.check_for_manipulation, payload)
+        return d
+
diff --git a/data/nettests/manipulation/traceroute.py b/data/nettests/manipulation/traceroute.py
new file mode 100644
index 0000000..3f6f17b
--- /dev/null
+++ b/data/nettests/manipulation/traceroute.py
@@ -0,0 +1,143 @@
+# -*- encoding: utf-8 -*-
+#
+# :authors: Arturo Filastò
+# :licence: see LICENSE
+
+from twisted.python import usage
+from twisted.internet import defer
+
+from ooni.templates import scapyt
+
+from scapy.all import *
+
+from ooni.utils import log
+
+class UsageOptions(usage.Options):
+    optParameters = [
+                    ['backend', 'b', '8.8.8.8', 'Test backend to use'],
+                    ['timeout', 't', 5, 'The timeout for the traceroute test'],
+                    ['maxttl', 'm', 30, 'The maximum value of ttl to set on packets'],
+                    ['srcport', 'p', None, 'Set the source port to a specific value (only applies to TCP and UDP)']
+                    ]
+
+class TracerouteTest(scapyt.BaseScapyTest):
+    name = "Multi Protocol Traceroute Test"
+    author = "Arturo Filastò"
+    version = "0.1.1"
+
+    usageOptions = UsageOptions
+    dst_ports = [0, 22, 23, 53, 80, 123, 443, 8080, 65535]
+
+    def setUp(self):
+        def get_sport(protocol):
+            if self.localOptions['srcport']:
+                return int(self.localOptions['srcport'])
+            else:
+                return random.randint(1024, 65535)
+
+        self.get_sport = get_sport
+
+    def max_ttl_and_timeout(self):
+        max_ttl = int(self.localOptions['maxttl'])
+        timeout = int(self.localOptions['timeout'])
+        self.report['max_ttl'] = max_ttl
+        self.report['timeout'] = timeout
+        return max_ttl, timeout
+
+
+    def postProcessor(self, report):
+        tcp_hops = report['test_tcp_traceroute']
+        udp_hops = report['test_udp_traceroute']
+        icmp_hops = report['test_icmp_traceroute']
+
+
+    def test_tcp_traceroute(self):
+        """
+        Does a traceroute to the destination by sending TCP SYN packets
+        with TTLs from 1 until max_ttl.
+        """
+        def finished(packets, port):
+            log.debug("Finished running TCP traceroute test on port %s" % port)
+            answered, unanswered = packets
+            self.report['hops_'+str(port)] = []
+            for snd, rcv in answered:
+                try:
+                    sport = snd[UDP].sport
+                except IndexError:
+                    log.err("Source port for this traceroute was not found. This is probably a bug")
+                    sport = -1
+
+                report = {'ttl': snd.ttl,
+                        'address': rcv.src,
+                        'rtt': rcv.time - snd.time,
+                        'sport': sport
+                }
+                log.debug("%s: %s" % (port, report))
+                self.report['hops_'+str(port)].append(report)
+
+        dl = []
+        max_ttl, timeout = self.max_ttl_and_timeout()
+        for port in self.dst_ports:
+            packets = IP(dst=self.localOptions['backend'],
+                    ttl=(1,max_ttl),id=RandShort())/TCP(flags=0x2, dport=port,
+                            sport=self.get_sport('tcp'))
+
+            d = self.sr(packets, timeout=timeout)
+            d.addCallback(finished, port)
+            dl.append(d)
+        return defer.DeferredList(dl)
+
+    def test_udp_traceroute(self):
+        """
+        Does a traceroute to the destination by sending UDP packets with empty
+        payloads with TTLs from 1 until max_ttl.
+        """
+        def finished(packets, port):
+            log.debug("Finished running UDP traceroute test on port %s" % port)
+            answered, unanswered = packets
+            self.report['hops_'+str(port)] = []
+            for snd, rcv in answered:
+                report = {'ttl': snd.ttl,
+                        'address': rcv.src,
+                        'rtt': rcv.time - snd.time,
+                        'sport': snd[UDP].sport
+                }
+                log.debug("%s: %s" % (port, report))
+                self.report['hops_'+str(port)].append(report)
+        dl = []
+        max_ttl, timeout = self.max_ttl_and_timeout()
+        for port in self.dst_ports:
+            packets = IP(dst=self.localOptions['backend'],
+                    ttl=(1,max_ttl),id=RandShort())/UDP(dport=port,
+                            sport=self.get_sport('udp'))
+
+            d = self.sr(packets, timeout=timeout)
+            d.addCallback(finished, port)
+            dl.append(d)
+        return defer.DeferredList(dl)
+
+    def test_icmp_traceroute(self):
+        """
+        Does a traceroute to the destination by sending ICMP echo request
+        packets with TTLs from 1 until max_ttl.
+        """
+        def finished(packets):
+            log.debug("Finished running ICMP traceroute test")
+            answered, unanswered = packets
+            self.report['hops'] = []
+            for snd, rcv in answered:
+                report = {'ttl': snd.ttl,
+                        'address': rcv.src,
+                        'rtt': rcv.time - snd.time
+                }
+                log.debug("%s" % (report))
+                self.report['hops'].append(report)
+        dl = []
+        max_ttl, timeout = self.max_ttl_and_timeout()
+        packets = IP(dst=self.localOptions['backend'],
+                    ttl=(1,max_ttl), id=RandShort())/ICMP()
+
+        d = self.sr(packets, timeout=timeout)
+        d.addCallback(finished)
+        return d
+
diff --git a/data/nettests/scanning/__init__.py b/data/nettests/scanning/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/data/nettests/scanning/http_url_list.py b/data/nettests/scanning/http_url_list.py
new file mode 100644
index 0000000..0accaae
--- /dev/null
+++ b/data/nettests/scanning/http_url_list.py
@@ -0,0 +1,98 @@
+# -*- encoding: utf-8 -*-
+#
+# :authors: Arturo Filastò
+# :licence: see LICENSE
+
+from twisted.internet import defer
+from twisted.python import usage
+from ooni.templates import httpt
+from ooni.utils import log
+
+class UsageOptions(usage.Options):
+    optParameters = [['content', 'c', None,
+                        'The file to read from containing the content of a block page'],
+                     ['url', 'u', None, 'Specify a single URL to test.']
+                    ]
+
+class HTTPURLList(httpt.HTTPTest):
+    """
+    Performs GET, POST and PUT requests to a list of URLs specified as
+    input and checks if the page that we get back as a result matches that
+    of a block page given as input.
+
+    If no block page is given as input to the test it will simply collect the
+    responses to the HTTP requests and write them to a report file.
+    """
+    name = "HTTP URL List"
+    author = "Arturo Filastò"
+    version = "0.1.3"
+
+    usageOptions = UsageOptions
+
+    inputFile = ['file', 'f', None, 
+            'List of URLS to perform GET and POST requests to']
+
+    def setUp(self):
+        """
+        Check for inputs.
+        """
+        if self.input:
+            self.url = self.input
+        elif self.localOptions['url']:
+            self.url = self.localOptions['url']
+        else:
+            raise Exception("No input specified")
+
+    def check_for_content_censorship(self, body):
+        """
+        If we have specified what a censorship page looks like here we will
+        check if the page we are looking at matches it.
+
+        XXX this is not tested, though it is basically what was used to detect
+        censorship in the palestine case.
+        """
+        self.report['censored'] = True
+
+        censorship_page = open(self.localOptions['content']).xreadlines()
+        response_page = iter(body.split("\n"))
+
+        # We first align the two pages to the first HTML tag (something
+        # starting with <). This is useful so that we can give as input to this
+        # test something that comes from the output of curl -kis
+        # http://the_page/
+        for line in censorship_page:
+            if line.strip().startswith("<"):
+                break
+        for line in response_page:
+            if line.strip().startswith("<"):
+                break
+
+        for censorship_line in censorship_page:
+            try:
+                response_line = response_page.next()
+            except StopIteration:
+                # The censored page and the response we got do not match in
+                # length.
+                self.report['censored'] = False
+                break
+            censorship_line = censorship_line.replace("\n", "")
+            if response_line != censorship_line:
+                self.report['censored'] = False
+
+        censorship_page.close()
+
+    def processResponseBody(self, body):
+        if self.localOptions['content']:
+            log.msg("Checking for censorship in response body")
+            self.check_for_content_censorship(body)
+
+    def test_get(self):
+        return self.doRequest(self.url, method="GET")
+
+    def test_post(self):
+        return self.doRequest(self.url, method="POST")
+
+    def test_put(self):
+        return self.doRequest(self.url, method="PUT")
+
+
diff --git a/data/nettests/third_party/Makefile b/data/nettests/third_party/Makefile
new file mode 100644
index 0000000..16adfe0
--- /dev/null
+++ b/data/nettests/third_party/Makefile
@@ -0,0 +1,3 @@
+fetch:
+	wget http://netalyzr.icsi.berkeley.edu/NetalyzrCLI.jar
+	chmod +x NetalyzrCLI.jar
diff --git a/data/nettests/third_party/README b/data/nettests/third_party/README
new file mode 100644
index 0000000..d9e435f
--- /dev/null
+++ b/data/nettests/third_party/README
@@ -0,0 +1,14 @@
+There is no license for NetalyzrCLI.jar; so while we include it, it's just
+for ease of use.
+
+We currently support interfacing with the ICSI Netalyzr system by wrapping
+the NetalyzrCLI.jar client. It was downloaded on August 5th, 2011 from the
+following URL:
+  http://netalyzr.icsi.berkeley.edu/NetalyzrCLI.jar
+
+More information about the client is available on the cli web page:
+  http://netalyzr.icsi.berkeley.edu/cli.html
+
+After looking at NetalyzrCLI.jar, I discovered that '-d' runs it in a
+debugging mode that is quite useful for understanding their testing
+framework as it runs.
diff --git a/data/nettests/third_party/__init__.py b/data/nettests/third_party/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/data/nettests/third_party/netalyzr.py b/data/nettests/third_party/netalyzr.py
new file mode 100644
index 0000000..9b21831
--- /dev/null
+++ b/data/nettests/third_party/netalyzr.py
@@ -0,0 +1,58 @@
+# -*- encoding: utf-8 -*-
+#
+# This is a wrapper around the Netalyzer Java command line client
+#
+# :authors: Jacob Appelbaum <jacob@xxxxxxxxxxxxx>
+#           Arturo "hellais" Filastò <art@xxxxxxxxx>
+# :licence: see LICENSE
+
+from ooni import nettest
+from ooni.utils import log
+import time
+import os
+from twisted.internet import reactor, threads, defer
+
+class NetalyzrWrapperTest(nettest.NetTestCase):
+    name = "NetalyzrWrapper"
+
+    def setUp(self):
+        cwd = os.path.abspath(os.path.join(os.path.abspath(__file__), '..'))
+
+        # XXX set the output directory to something more uniform
+        outputdir = os.path.join(cwd, '..', '..')
+
+        program_path = os.path.join(cwd, 'NetalyzrCLI.jar')
+        program = "java -jar %s -d" % program_path
+
+        test_token = time.asctime(time.gmtime()).replace(" ", "_").strip()
+
+        self.output_file = os.path.join(outputdir,
+                "NetalyzrCLI_" + test_token + ".out")
+        self.output_file.strip()
+        self.run_me = program + " 2>&1 >> " + self.output_file
+
+    def blocking_call(self):
+        try:
+            result = threads.blockingCallFromThread(reactor, os.system, self.run_me) 
+        except:
+            log.debug("Netalyzr had an error, please see the log file: %s" % self.output_file)
+        finally:
+            self.clean_up()
+
+    def clean_up(self):
+        self.report['netalyzr_report'] = self.output_file
+        log.debug("finished running NetalzrWrapper")
+        log.debug("Please check %s for Netalyzr output" % self.output_file)
+
+    def test_run_netalyzr(self):
+        """
+        This test simply wraps netalyzr and runs it from command line
+        """
+        log.msg("Running NetalyzrWrapper (this will take some time, be patient)")
+        log.debug("with command '%s'" % self.run_me)
+        # XXX we probably want to use a processprotocol here to obtain the
+        # stdout from Netalyzr. This would allow us to visualize progress
+        # (currently there is no progress because the stdout of os.system is
+        # trapped by twisted) and to include the link to the netalyzr report
+        # directly in the OONI report, perhaps even downloading it.
+        reactor.callInThread(self.blocking_call)
diff --git a/nettests/__init__.py b/nettests/__init__.py
deleted file mode 100644
index e69de29..0000000
diff --git a/nettests/blocking/__init__.py b/nettests/blocking/__init__.py
deleted file mode 100644
index 8b13789..0000000
--- a/nettests/blocking/__init__.py
+++ /dev/null
@@ -1 +0,0 @@
-
diff --git a/nettests/blocking/dnsconsistency.py b/nettests/blocking/dnsconsistency.py
deleted file mode 100644
index 7b6e7b9..0000000
--- a/nettests/blocking/dnsconsistency.py
+++ /dev/null
@@ -1,173 +0,0 @@
-# -*- encoding: utf-8 -*-
-#
-#  dnsconsistency
-#  **************
-#
-#  The test reports censorship if the cardinality of the intersection of
-#  the query result set from the control server and the query result set
-#  from the experimental server is zero, which is to say, if the two sets
-#  have no matching results whatsoever.
-#
-#  NOTE: This test frequently results in false positives due to GeoIP-based
-#  load balancing on major global sites such as google, facebook, and
-#  youtube, etc.
-#
-# :authors: Arturo Filastò, Isis Lovecruft
-# :licence: see LICENSE
-
-import pdb
-
-from twisted.python import usage
-from twisted.internet import defer
-
-from ooni.templates import dnst
-
-from ooni import nettest
-from ooni.utils import log
-
-class UsageOptions(usage.Options):
-    optParameters = [['backend', 'b', '8.8.8.8:53',
-                        'The OONI backend that runs the DNS resolver'],
-                     ['testresolvers', 'T', None,
-                        'File containing list of DNS resolvers to test against'],
-                     ['testresolver', 't', None,
-                         'Specify a single test resolver to use for testing']
-                    ]
-
-class DNSConsistencyTest(dnst.DNSTest):
-
-    name = "DNS Consistency"
-    description = "DNS censorship detection test"
-    version = "0.5"
-    authors = "Arturo Filastò, Isis Lovecruft"
-    requirements = None
-
-    inputFile = ['file', 'f', None,
-                 'Input file of list of hostnames to attempt to resolve']
-
-    usageOptions = UsageOptions
-    requiredOptions = ['backend', 'file']
-
-    def setUp(self):
-        if (not self.localOptions['testresolvers'] and \
-                not self.localOptions['testresolver']):
-            raise usage.UsageError("You did not specify a testresolver")
-
-        elif self.localOptions['testresolvers']:
-            test_resolvers_file = self.localOptions['testresolvers']
-
-        elif self.localOptions['testresolver']:
-            self.test_resolvers = [self.localOptions['testresolver']]
-
-        try:
-            with open(test_resolvers_file) as f:
-                self.test_resolvers = [x.split('#')[0].strip() for x in f.readlines()]
-                self.report['test_resolvers'] = self.test_resolvers
-            f.close()
-
-        except IOError, e:
-            log.exception(e)
-            raise usage.UsageError("Invalid test resolvers file")
-
-        except NameError:
-            log.debug("No test resolver file configured")
-
-        dns_ip, dns_port = self.localOptions['backend'].split(':')
-        self.control_dns_server = (dns_ip, int(dns_port))
-
-        self.report['control_resolver'] = self.control_dns_server
-
-    @defer.inlineCallbacks
-    def test_a_lookup(self):
-        """
-        We perform an A lookup on the DNS test servers for the domains to be
-        tested and an A lookup on the known good DNS server.
-
-        We then compare the results from test_resolvers and that from
-        control_resolver and see if the match up.
-        If they match up then no censorship is happening (tampering: false).
-
-        If they do not we do a reverse lookup (PTR) on the test_resolvers and
-        the control resolver for every IP address we got back and check to see
-        if anyone of them matches the control ones.
-
-        If they do then we take not of the fact that censorship is probably not
-        happening (tampering: reverse-match).
-
-        If they do not match then censorship is probably going on (tampering:
-        true).
-        """
-        log.msg("Doing the test lookups on %s" % self.input)
-        list_of_ds = []
-        hostname = self.input
-
-        self.report['tampering'] = {}
-
-        control_answers = yield self.performALookup(hostname, self.control_dns_server)
-        if not control_answers:
-                log.err("Got no response from control DNS server %s," \
-                        " perhaps the DNS resolver is down?" % self.control_dns_server[0])
-                self.report['tampering'][self.control_dns_server] = 'no_answer'
-                return
-
-        for test_resolver in self.test_resolvers:
-            log.msg("Testing resolver: %s" % test_resolver)
-            test_dns_server = (test_resolver, 53)
-
-            try:
-                experiment_answers = yield self.performALookup(hostname, test_dns_server)
-            except Exception, e:
-                log.err("Problem performing the DNS lookup")
-                log.exception(e)
-                self.report['tampering'][test_resolver] = 'dns_lookup_error'
-                continue
-
-            if not experiment_answers:
-                log.err("Got no response, perhaps the DNS resolver is down?")
-                self.report['tampering'][test_resolver] = 'no_answer'
-                continue
-            else:
-                log.debug("Got the following A lookup answers %s from %s" % (experiment_answers, test_resolver))
-
-            def lookup_details():
-                """
-                A closure useful for printing test details.
-                """
-                log.msg("test resolver: %s" % test_resolver)
-                log.msg("experiment answers: %s" % experiment_answers)
-                log.msg("control answers: %s" % control_answers)
-
-            log.debug("Comparing %s with %s" % (experiment_answers, control_answers))
-            if set(experiment_answers) & set(control_answers):
-                lookup_details()
-                log.msg("tampering: false")
-                self.report['tampering'][test_resolver] = False
-            else:
-                log.msg("Trying to do reverse lookup")
-
-                experiment_reverse = yield self.performPTRLookup(experiment_answers[0], test_dns_server)
-                control_reverse = yield self.performPTRLookup(control_answers[0], self.control_dns_server)
-
-                if experiment_reverse == control_reverse:
-                    log.msg("Further testing has eliminated false positives")
-                    lookup_details()
-                    log.msg("tampering: reverse_match")
-                    self.report['tampering'][test_resolver] = 'reverse_match'
-                else:
-                    log.msg("Reverse lookups do not match")
-                    lookup_details()
-                    log.msg("tampering: true")
-                    self.report['tampering'][test_resolver] = True
-
-    def inputProcessor(self, filename=None):
-        """
-        This inputProcessor extracts domain names from urls
-        """
-        log.debug("Running dnsconsistency default processor")
-        if filename:
-            fp = open(filename)
-            for x in fp.readlines():
-                yield x.strip().split('//')[-1].split('/')[0]
-            fp.close()
-        else:
-            pass
diff --git a/nettests/blocking/http_requests.py b/nettests/blocking/http_requests.py
deleted file mode 100644
index 9208739..0000000
--- a/nettests/blocking/http_requests.py
+++ /dev/null
@@ -1,130 +0,0 @@
-# -*- encoding: utf-8 -*-
-#
-# :authors: Arturo Filastò
-# :licence: see LICENSE
-
-import random
-from twisted.internet import defer
-from twisted.python import usage
-
-from ooni.utils import log
-from ooni.utils.net import userAgents
-from ooni.templates import httpt
-from ooni.errors import failureToString, handleAllFailures
-
-class UsageOptions(usage.Options):
-    optParameters = [
-                     ['url', 'u', None, 'Specify a single URL to test.'],
-                     ['factor', 'f', 0.8, 'What factor should be used for triggering censorship (0.8 == 80%)']
-                    ]
-
-class HTTPRequestsTest(httpt.HTTPTest):
-    """
-    Performs a two GET requests to the set of sites to be tested for
-    censorship, one over a known good control channel (Tor), the other over the
-    test network.
-
-    We check to see if the response headers match and if the response body
-    lengths match.
-    """
-    name = "HTTP Requests Test"
-    author = "Arturo Filastò"
-    version = "0.2.3"
-
-    usageOptions = UsageOptions
-
-    inputFile = ['file', 'f', None,
-            'List of URLS to perform GET and POST requests to']
-
-    # These values are used for determining censorship based on response body
-    # lengths
-    control_body_length = None
-    experiment_body_length = None
-
-    def setUp(self):
-        """
-        Check for inputs.
-        """
-        if self.input:
-            self.url = self.input
-        elif self.localOptions['url']:
-            self.url = self.localOptions['url']
-        else:
-            raise Exception("No input specified")
-
-        self.factor = self.localOptions['factor']
-        self.report['control_failure'] = None
-        self.report['experiment_failure'] = None
-
-    def compare_body_lengths(self, body_length_a, body_length_b):
-
-        if body_length_b == 0 and body_length_a != 0:
-            rel = float(body_length_b)/float(body_length_a)
-        elif body_length_b == 0 and body_length_a == 0:
-            rel = float(1)
-        else:
-            rel = float(body_length_a)/float(body_length_b)
-
-        if rel > 1:
-            rel = 1/rel
-
-        self.report['body_proportion'] = rel
-        self.report['factor'] = self.factor
-        if rel > self.factor:
-            log.msg("The two body lengths appear to match")
-            log.msg("censorship is probably not happening")
-            self.report['body_length_match'] = True
-        else:
-            log.msg("The two body lengths appear to not match")
-            log.msg("censorship could be happening")
-            self.report['body_length_match'] = False
-
-    def compare_headers(self, headers_a, headers_b):
-        diff = headers_a.getDiff(headers_b)
-        if diff:
-            log.msg("Headers appear to *not* match")
-            self.report['headers_diff'] = diff
-            self.report['headers_match'] = False
-        else:
-            log.msg("Headers appear to match")
-            self.report['headers_diff'] = diff
-            self.report['headers_match'] = True
-
-    def test_get(self):
-        def callback(res):
-            experiment, control = res
-            experiment_succeeded, experiment_result = experiment
-            control_succeeded, control_result = control
-
-            if control_succeeded and experiment_succeeded:
-                self.compare_body_lengths(len(experiment_result.body),
-                        len(control_result.body))
-
-                self.compare_headers(control_result.headers,
-                        experiment_result.headers)
-
-            if not control_succeeded:
-                self.report['control_failure'] = failureToString(control_result)
-
-            if not experiment_succeeded:
-                self.report['experiment_failure'] = failureToString(experiment_result)
-
-        headers = {'User-Agent': [random.choice(userAgents)]}
-
-        l = []
-        log.msg("Performing GET request to %s" % self.url)
-        experiment_request = self.doRequest(self.url, method="GET",
-                headers=headers)
-
-        log.msg("Performing GET request to %s via Tor" % self.url)
-        control_request = self.doRequest(self.url, method="GET",
-                use_tor=True, headers=headers)
-
-        l.append(experiment_request)
-        l.append(control_request)
-
-        dl = defer.DeferredList(l, consumeErrors=True)
-        dl.addCallback(callback)
-
-        return dl
-
diff --git a/nettests/blocking/tcpconnect.py b/nettests/blocking/tcpconnect.py
deleted file mode 100644
index 3b22427..0000000
--- a/nettests/blocking/tcpconnect.py
+++ /dev/null
@@ -1,45 +0,0 @@
-# -*- encoding: utf-8 -*-
-from twisted.internet.protocol import Factory, Protocol
-from twisted.internet.endpoints import TCP4ClientEndpoint
-
-from twisted.internet.error import ConnectionRefusedError
-from twisted.internet.error import TCPTimedOutError, TimeoutError
-
-from ooni import nettest
-from ooni.errors import handleAllFailures
-from ooni.utils import log
-
-class TCPFactory(Factory):
-    def buildProtocol(self, addr):
-        return Protocol()
-
-class TCPConnectTest(nettest.NetTestCase):
-    name = "TCP Connect"
-    author = "Arturo Filastò"
-    version = "0.1"
-    inputFile = ['file', 'f', None,
-            'File containing the IP:PORT combinations to be tested, one per line']
-
-    requiredOptions = ['file']
-    def test_connect(self):
-        """
-        This test performs a TCP connection to the remote host on the specified port.
-        The report will contain the string 'success' if the test has
-        succeeded, or the reason for the failure if it has failed.
-        """
-        host, port = self.input.split(":")
-        def connectionSuccess(protocol):
-            protocol.transport.loseConnection()
-            log.debug("Got a connection to %s" % self.input)
-            self.report["connection"] = 'success'
-
-        def connectionFailed(failure):
-            self.report['connection'] = handleAllFailures(failure)
-
-        from twisted.internet import reactor
-        point = TCP4ClientEndpoint(reactor, host, int(port))
-        d = point.connect(TCPFactory())
-        d.addCallback(connectionSuccess)
-        d.addErrback(connectionFailed)
-        return d
-
diff --git a/nettests/examples/__init__.py b/nettests/examples/__init__.py
deleted file mode 100644
index e69de29..0000000
diff --git a/nettests/examples/example_dns_http.py b/nettests/examples/example_dns_http.py
deleted file mode 100644
index 9b76775..0000000
--- a/nettests/examples/example_dns_http.py
+++ /dev/null
@@ -1,11 +0,0 @@
-from twisted.internet import defer
-from ooni.templates import httpt, dnst
-
-class TestDNSandHTTP(httpt.HTTPTest, dnst.DNSTest):
-
-    @defer.inlineCallbacks
-    def test_http_and_dns(self):
-        yield self.doRequest('http://torproject.org')
-        yield self.performALookup('torproject.org', ('8.8.8.8', 53))
-
-
diff --git a/nettests/examples/example_dnst.py b/nettests/examples/example_dnst.py
deleted file mode 100644
index 6905637..0000000
--- a/nettests/examples/example_dnst.py
+++ /dev/null
@@ -1,13 +0,0 @@
-from ooni.templates import dnst
-
-class ExampleDNSTest(dnst.DNSTest):
-    inputFile = ['file', 'f', None, 'foobar']
-
-    def test_a_lookup(self):
-        def gotResult(result):
-            # Result is an array containing all the A record lookup results
-            print result
-
-        d = self.performALookup('torproject.org', ('8.8.8.8', 53))
-        d.addCallback(gotResult)
-        return d
diff --git a/nettests/examples/example_http_checksum.py b/nettests/examples/example_http_checksum.py
deleted file mode 100644
index 9226b52..0000000
--- a/nettests/examples/example_http_checksum.py
+++ /dev/null
@@ -1,27 +0,0 @@
-# -*- encoding: utf-8 -*-
-#
-# :authors: Aaron Gibson
-# :licence: see LICENSE
-
-from ooni.utils import log
-from ooni.templates import httpt
-from hashlib import sha256
-
-class SHA256HTTPBodyTest(httpt.HTTPTest):
-    name = "ChecksumHTTPBodyTest"
-    author = "Aaron Gibson"
-    version = 0.1
-
-    inputFile = ['file', 'f', None, 
-            'List of URLS to perform GET requests to']
-
-    def test_http(self):
-        if self.input:
-            url = self.input
-            return self.doRequest(url)
-        else:
-            raise Exception("No input specified")
-
-    def processResponseBody(self, body):
-        body_sha256sum = sha256(body).digest()
-        self.report['checksum'] = body_sha256sum
diff --git a/nettests/examples/example_httpt.py b/nettests/examples/example_httpt.py
deleted file mode 100644
index e76aed4..0000000
--- a/nettests/examples/example_httpt.py
+++ /dev/null
@@ -1,36 +0,0 @@
-# -*- encoding: utf-8 -*-
-#
-# :authors: Arturo Filastò
-# :licence: see LICENSE
-
-from ooni.utils import log
-from ooni.templates import httpt
-
-class ExampleHTTP(httpt.HTTPTest):
-    name = "Example HTTP Test"
-    author = "Arturo Filastò"
-    version = 0.1
-
-    inputs = ['http://google.com/', 'http://wikileaks.org/',
-              'http://torproject.org/']
-
-    def test_http(self):
-        if self.input:
-            url = self.input
-            return self.doRequest(url)
-        else:
-            raise Exception("No input specified")
-
-    def processResponseBody(self, body):
-        # XXX here shall go your logic
-        #     for processing the body
-        if 'blocked' in body:
-            self.report['censored'] = True
-        else:
-            self.report['censored'] = False
-
-    def processResponseHeaders(self, headers):
-        # XXX place in here all the logic for handling the processing of HTTP
-        #     Headers.
-        pass
-
diff --git a/nettests/examples/example_myip.py b/nettests/examples/example_myip.py
deleted file mode 100644
index 70cf773..0000000
--- a/nettests/examples/example_myip.py
+++ /dev/null
@@ -1,21 +0,0 @@
-# -*- encoding: utf-8 -*-
-#
-# :authors: Arturo Filastò
-# :licence: see LICENSE
-
-from ooni.templates import httpt
-class MyIP(httpt.HTTPTest):
-    inputs = ['https://check.torproject.org']
-
-    def test_lookup(self):
-        return self.doRequest(self.input)
-
-    def processResponseBody(self, body):
-        import re
-        regexp = "Your IP address appears to be: <b>(.+?)<\/b>"
-        match = re.search(regexp, body)
-        try:
-            self.report['myip'] = match.group(1)
-        except:
-            self.report['myip'] = None
-
diff --git a/nettests/examples/example_scapyt.py b/nettests/examples/example_scapyt.py
deleted file mode 100644
index ba04072..0000000
--- a/nettests/examples/example_scapyt.py
+++ /dev/null
@@ -1,29 +0,0 @@
-# -*- encoding: utf-8 -*-
-#
-# :licence: see LICENSE
-
-from twisted.python import usage
-
-from scapy.all import IP, ICMP
-
-from ooni.templates import scapyt
-
-class UsageOptions(usage.Options):
-    optParameters = [['target', 't', '8.8.8.8', "Specify the target to ping"]]
-    
-class ExampleICMPPingScapy(scapyt.BaseScapyTest):
-    name = "Example ICMP Ping Test"
-
-    usageOptions = UsageOptions
-
-    def test_icmp_ping(self):
-        def finished(packets):
-            print packets
-            answered, unanswered = packets
-            for snd, rcv in answered:
-                rcv.show()
-
-        packets = IP(dst=self.localOptions['target'])/ICMP()
-        d = self.sr(packets)
-        d.addCallback(finished)
-        return d
diff --git a/nettests/examples/example_scapyt_yield.py b/nettests/examples/example_scapyt_yield.py
deleted file mode 100644
index 311b5aa..0000000
--- a/nettests/examples/example_scapyt_yield.py
+++ /dev/null
@@ -1,25 +0,0 @@
-# -*- encoding: utf-8 -*-
-#
-# :licence: see LICENSE
-
-from twisted.python import usage
-from twisted.internet import defer
-
-from scapy.all import IP, ICMP
-
-from ooni.templates import scapyt
-
-class UsageOptions(usage.Options):
-    optParameters = [['target', 't', self.localOptions['target'], "Specify the target to ping"]]
-
-class ExampleICMPPingScapyYield(scapyt.BaseScapyTest):
-    name = "Example ICMP Ping Test"
-
-    usageOptions = UsageOptions
-
-    @defer.inlineCallbacks
-    def test_icmp_ping(self):
-        packets = IP(dst=self.localOptions['target'])/ICMP()
-        answered, unanswered = yield self.sr(packets)
-        for snd, rcv in answered:
-            rcv.show()
diff --git a/nettests/examples/example_simple.py b/nettests/examples/example_simple.py
deleted file mode 100644
index 24de5a6..0000000
--- a/nettests/examples/example_simple.py
+++ /dev/null
@@ -1,8 +0,0 @@
-from twisted.internet import defer
-from ooni import nettest
-
-class MyIP(nettest.NetTestCase):
-    def test_simple(self):
-        self.report['foobar'] = 'antani'
-        return defer.succeed(42)
-
diff --git a/nettests/examples/example_tcpt.py b/nettests/examples/example_tcpt.py
deleted file mode 100644
index 613160b..0000000
--- a/nettests/examples/example_tcpt.py
+++ /dev/null
@@ -1,21 +0,0 @@
-
-from twisted.internet.error import ConnectionRefusedError
-from ooni.utils import log
-from ooni.templates import tcpt
-
-class ExampleTCPT(tcpt.TCPTest):
-    def test_hello_world(self):
-        def got_response(response):
-            print "Got this data %s" % response
-
-        def connection_failed(failure):
-            failure.trap(ConnectionRefusedError)
-            print "Connection Refused"
-
-        self.address = "127.0.0.1"
-        self.port = 57002
-        payload = "Hello World!\n\r"
-        d = self.sendPayload(payload)
-        d.addErrback(connection_failed)
-        d.addCallback(got_response)
-        return d
diff --git a/nettests/experimental/__init__.py b/nettests/experimental/__init__.py
deleted file mode 100644
index e69de29..0000000
diff --git a/nettests/experimental/bridge_reachability/bridget.py b/nettests/experimental/bridge_reachability/bridget.py
deleted file mode 100644
index acf3dff..0000000
--- a/nettests/experimental/bridge_reachability/bridget.py
+++ /dev/null
@@ -1,462 +0,0 @@
-#!/usr/bin/env python
-# -*- encoding: utf-8 -*-
-#
-#  +-----------+
-#  |  BRIDGET  |
-#  |        +--------------------------------------------+
-#  +--------| Use a Tor process to test making a Tor     |
-#           | connection to a list of bridges or relays. |
-#           +--------------------------------------------+
-#
-# :authors: Isis Lovecruft, Arturo Filasto
-# :licence: see included LICENSE
-# :version: 0.1.0-alpha
-
-from __future__           import with_statement
-from functools            import partial
-from random               import randint
-
-import os
-import sys
-
-from twisted.python       import usage
-from twisted.internet     import defer, error, reactor
-
-from ooni                 import nettest
-
-from ooni.utils           import log, date
-from ooni.utils.config    import ValueChecker
-
-from ooni.utils.onion     import TxtorconImportError
-from ooni.utils.onion     import PTNoBridgesException, PTNotFoundException
-
-
-try:
-    from ooni.utils.onion     import parse_data_dir
-except:
-    log.msg("Please go to /ooni/lib and do 'make txtorcon' to run this test!")
-
-class MissingAssetException(Exception):
-    pass
-
-class RandomPortException(Exception):
-    """Raised when using a random port conflicts with configured ports."""
-    def __init__(self):
-        log.msg("Unable to use random and specific ports simultaneously")
-        return sys.exit()
-
-class BridgetArgs(usage.Options):
-    """Commandline options."""
-    allowed = "Port to use for Tor's %s, must be between 1024 and 65535."
-    sock_check = ValueChecker(allowed % "SocksPort").port_check
-    ctrl_check = ValueChecker(allowed % "ControlPort").port_check
-
-    optParameters = [
-        ['bridges', 'b', None,
-         'File listing bridge IP:ORPorts to test'],
-        ['relays', 'f', None,
-         'File listing relay IPs to test'],
-        ['socks', 's', 9049, None, sock_check],
-        ['control', 'c', 9052, None, ctrl_check],
-        ['torpath', 'p', None,
-         'Path to the Tor binary to use'],
-        ['datadir', 'd', None,
-         'Tor DataDirectory to use'],
-        ['transport', 't', None,
-         'Tor ClientTransportPlugin'],
-        ['resume', 'r', 0,
-         'Resume at this index']]
-    optFlags = [['random', 'x', 'Use random ControlPort and SocksPort']]
-
-    def postOptions(self):
-        if not self['bridges'] and not self['relays']:
-            raise MissingAssetException(
-                "Bridget can't run without bridges or relays to test!")
-        if self['transport']:
-            ValueChecker.uid_check(
-                "Can't run bridget as root with pluggable transports!")
-            if not self['bridges']:
-                raise PTNoBridgesException
-        if self['socks'] or self['control']:
-            if self['random']:
-                raise RandomPortException
-        if self['datadir']:
-            ValueChecker.dir_check(self['datadir'])
-        if self['torpath']:
-            ValueChecker.file_check(self['torpath'])
-
-class BridgetTest(nettest.NetTestCase):
-    """
-    XXX fill me in
-
-    :ivar config:
-        An :class:`ooni.lib.txtorcon.TorConfig` instance.
-    :ivar relays:
-        A list of all provided relays to test.
-    :ivar bridges:
-        A list of all provided bridges to test.
-    :ivar socks_port:
-        Integer for Tor's SocksPort.
-    :ivar control_port:
-        Integer for Tor's ControlPort.
-    :ivar transport:
-        String defining the Tor's ClientTransportPlugin, for testing
-        a bridge's pluggable transport functionality.
-    :ivar tor_binary:
-        Path to the Tor binary to use, e.g. \'/usr/sbin/tor\'
-    """
-    name    = "bridget"
-    author  = "Isis Lovecruft <isis@xxxxxxxxxxxxxx>"
-    version = "0.1"
-    description   = "Use a Tor process to test connecting to bridges or relays"
-    usageOptions = BridgetArgs
-
-    def setUp(self):
-        """
-        Extra initialization steps. We only want one child Tor process
-        running, so we need to deal with most of the TorConfig() only once,
-        before the experiment runs.
-        """
-        self.socks_port      = 9049
-        self.control_port    = 9052
-        self.circuit_timeout = 90
-        self.tor_binary      = '/usr/sbin/tor'
-        self.data_directory  = None
-
-        def read_from_file(filename):
-            log.msg("Loading information from %s ..." % opt)
-            with open(filename) as fp:
-                lst = []
-                for line in fp.readlines():
-                    if line.startswith('#'):
-                        continue
-                    else:
-                        lst.append(line.replace('\n',''))
-                return lst
-
-        def __count_remaining__(which):
-            total, reach, unreach = map(lambda x: which[x],
-                                        ['all', 'reachable', 'unreachable'])
-            count = len(total) - reach() - unreach()
-            return count
-
-        ## XXX should we do report['bridges_up'].append(self.bridges['current'])
-        self.bridges = {}
-        self.bridges['all'], self.bridges['up'], self.bridges['down'] = \
-            ([] for i in range(3))
-        self.bridges['reachable']   = lambda: len(self.bridges['up'])
-        self.bridges['unreachable'] = lambda: len(self.bridges['down'])
-        self.bridges['remaining']   = lambda: __count_remaining__(self.bridges)
-        self.bridges['current']     = None
-        self.bridges['pt_type']     = None
-        self.bridges['use_pt']      = False
-
-        self.relays = {}
-        self.relays['all'], self.relays['up'], self.relays['down'] = \
-            ([] for i in range(3))
-        self.relays['reachable']   = lambda: len(self.relays['up'])
-        self.relays['unreachable'] = lambda: len(self.relays['down'])
-        self.relays['remaining']   = lambda: __count_remaining__(self.relays)
-        self.relays['current']     = None
-
-        if self.localOptions:
-            try:
-                from txtorcon import TorConfig
-            except ImportError:
-                raise TxtorconImportError
-            else:
-                self.config = TorConfig()
-            finally:
-                options = self.localOptions
-
-            if options['bridges']:
-                self.config.UseBridges = 1
-                self.bridges['all'] = read_from_file(options['bridges'])
-            if options['relays']:
-                ## first hop must be in TorState().guards
-                # XXX where is this defined?
-                self.config.EntryNodes = ','.join(relay_list)
-                self.relays['all'] = read_from_file(options['relays'])
-            if options['socks']:
-                self.socks_port = options['socks']
-            if options['control']:
-                self.control_port = options['control']
-            if options['random']:
-                log.msg("Using randomized ControlPort and SocksPort ...")
-                self.socks_port   = randint(1024, 2**16)
-                self.control_port = randint(1024, 2**16)
-            if options['torpath']:
-                self.tor_binary = options['torpath']
-            if options['datadir']:
-                self.data_directory = parse_data_dir(options['datadir'])
-            if options['transport']:
-                ## ClientTransportPlugin transport exec pathtobinary [options]
-                ## XXX we need a better way to deal with all PTs
-                log.msg("Using ClientTransportPlugin %s" % options['transport'])
-                self.bridges['use_pt'] = True
-                [self.bridges['pt_type'], pt_exec] = \
-                    options['transport'].split(' ', 1)
-
-                if self.bridges['pt_type'] == "obfs2":
-                    self.config.ClientTransportPlugin = \
-                        self.bridges['pt_type'] + " " + pt_exec
-                else:
-                    raise PTNotFoundException
-
-            self.config.SocksPort            = self.socks_port
-            self.config.ControlPort          = self.control_port
-            self.config.CookieAuthentication = 1
-
-    def test_bridget(self):
-        """
-        if bridges:
-            1. configure first bridge line
-            2a. configure data_dir, if it doesn't exist
-            2b. write torrc to a tempfile in data_dir
-            3. start tor                              } if any of these
-            4. remove bridges which are public relays } fail, add current
-            5. SIGHUP for each bridge                 } bridge to unreach-
-                                                      } able bridges.
-        if relays:
-            1a. configure the data_dir, if it doesn't exist
-            1b. write torrc to a tempfile in data_dir
-            2. start tor
-            3. remove any of our relays which are already part of current
-               circuits
-            4a. attach CustomCircuit() to self.state
-            4b. RELAY_EXTEND for each relay } if this fails, add
-                                            } current relay to list
-                                            } of unreachable relays
-            5.
-        if bridges and relays:
-            1. configure first bridge line
-            2a. configure data_dir if it doesn't exist
-            2b. write torrc to a tempfile in data_dir
-            3. start tor
-            4. remove bridges which are public relays
-            5. remove any of our relays which are already part of current
-               circuits
-            6a. attach CustomCircuit() to self.state
-            6b. for each bridge, build three circuits, with three
-                relays each
-            6c. RELAY_EXTEND for each relay } if this fails, add
-                                            } current relay to list
-                                            } of unreachable relays
-
-        :param args:
-            The :class:`BridgetAsset` line currently being used. Except that
-            in Bridget it isn't, so it should be ignored and avoided.
-        """
-        try:
-            from ooni.utils         import process
-            from ooni.utils.onion   import remove_public_relays, start_tor
-            from ooni.utils.onion   import start_tor_filter_nodes
-            from ooni.utils.onion   import setup_fail, setup_done
-            from ooni.utils.onion   import CustomCircuit
-            from ooni.utils.timer   import deferred_timeout, TimeoutError
-            from ooni.lib.txtorcon  import TorConfig, TorState
-        except ImportError:
-            raise TxtorconImportError
-        except TxtorconImportError, tie:
-            log.err(tie)
-            sys.exit()
-
-        def reconfigure_done(state, bridges):
-            """
-            Append :ivar:`bridges['current']` to the list
-            :ivar:`bridges['up'].
-            """
-            log.msg("Reconfiguring with 'Bridge %s' successful"
-                    % bridges['current'])
-            bridges['up'].append(bridges['current'])
-            return state
-
-        def reconfigure_fail(state, bridges):
-            """
-            Append :ivar:`bridges['current']` to the list
-            :ivar:`bridges['down'].
-            """
-            log.msg("Reconfiguring TorConfig with parameters %s failed"
-                    % state)
-            bridges['down'].append(bridges['current'])
-            return state
-
-        @defer.inlineCallbacks
-        def reconfigure_bridge(state, bridges):
-            """
-            Rewrite the Bridge line in our torrc. If use of pluggable
-            transports was specified, rewrite the line as:
-                Bridge <transport_type> <IP>:<ORPort>
-            Otherwise, rewrite in the standard form:
-                Bridge <IP>:<ORPort>
-
-            :param state:
-                A fully bootstrapped instance of
-                :class:`ooni.lib.txtorcon.TorState`.
-            :param bridges:
-                A dictionary of bridges containing the following keys:
-
-                bridges['remaining'] :: A function returning and int for the
-                                        number of remaining bridges to test.
-                bridges['current']   :: A string containing the <IP>:<ORPort>
-                                        of the current bridge.
-                bridges['use_pt']    :: A boolean, True if we're testing
-                                        bridges with a pluggable transport;
-                                        False otherwise.
-                bridges['pt_type']   :: If :ivar:`bridges['use_pt'] is True,
-                                        this is a string containing the type
-                                        of pluggable transport to test.
-            :return:
-                :param:`state`
-            """
-            log.msg("Current Bridge: %s" % bridges['current'])
-            log.msg("We now have %d bridges remaining to test..."
-                    % bridges['remaining']())
-            try:
-                if bridges['use_pt'] is False:
-                    controller_response = yield state.protocol.set_conf(
-                        'Bridge', bridges['current'])
-                elif bridges['use_pt'] and bridges['pt_type'] is not None:
-                    controller_reponse = yield state.protocol.set_conf(
-                        'Bridge', bridges['pt_type'] +' '+ bridges['current'])
-                else:
-                    raise PTNotFoundException
-
-                if controller_response == 'OK':
-                    finish = yield reconfigure_done(state, bridges)
-                else:
-                    log.err("SETCONF for %s responded with error:\n %s"
-                            % (bridges['current'], controller_response))
-                    finish = yield reconfigure_fail(state, bridges)
-
-                defer.returnValue(finish)
-
-            except Exception, e:
-                log.err("Reconfiguring torrc with Bridge line %s failed:\n%s"
-                        % (bridges['current'], e))
-                defer.returnValue(None)
-
-        def attacher_extend_circuit(attacher, deferred, router):
-            ## XXX todo write me
-            ## state.attacher.extend_circuit
-            raise NotImplemented
-            #attacher.extend_circuit
-
-        def state_attach(state, path):
-            log.msg("Setting up custom circuit builder...")
-            attacher = CustomCircuit(state)
-            state.set_attacher(attacher, reactor)
-            state.add_circuit_listener(attacher)
-            return state
-
-            ## OLD
-            #for circ in state.circuits.values():
-            #    for relay in circ.path:
-            #        try:
-            #            relay_list.remove(relay)
-            #        except KeyError:
-            #            continue
-            ## XXX how do we attach to circuits with bridges?
-            d = defer.Deferred()
-            attacher.request_circuit_build(d)
-            return d
-
-        def state_attach_fail(state):
-            log.err("Attaching custom circuit builder failed: %s" % state)
-
-        log.msg("Bridget: initiating test ... ")  ## Start the experiment
-
-        ## if we've at least one bridge, and our config has no 'Bridge' line
-        if self.bridges['remaining']() >= 1 \
-                and not 'Bridge' in self.config.config:
-
-            ## configure our first bridge line
-            self.bridges['current'] = self.bridges['all'][0]
-            self.config.Bridge = self.bridges['current']
-                                                  ## avoid starting several
-            self.config.save()                    ## processes
-            assert self.config.config.has_key('Bridge'), "No Bridge Line"
-
-            ## start tor and remove bridges which are public relays
-            from ooni.utils.onion import start_tor_filter_nodes
-            state = start_tor_filter_nodes(reactor, self.config,
-                                           self.control_port, self.tor_binary,
-                                           self.data_directory, self.bridges)
-            #controller = defer.Deferred()
-            #controller.addCallback(singleton_semaphore, tor)
-            #controller.addErrback(setup_fail)
-            #bootstrap = defer.gatherResults([controller, filter_bridges],
-            #                                consumeErrors=True)
-
-            if state is not None:
-                log.debug("state:\n%s" % state)
-                log.debug("Current callbacks on TorState():\n%s"
-                          % state.callbacks)
-
-        ## if we've got more bridges
-        if self.bridges['remaining']() >= 2:
-            #all = []
-            for bridge in self.bridges['all'][1:]:
-                self.bridges['current'] = bridge
-                #new = defer.Deferred()
-                #new.addCallback(reconfigure_bridge, state, self.bridges)
-                #all.append(new)
-            #check_remaining = defer.DeferredList(all, consumeErrors=True)
-            #state.chainDeferred(check_remaining)
-                state.addCallback(reconfigure_bridge, self.bridges)
-
-        if self.relays['remaining']() > 0:
-            while self.relays['remaining']() >= 3:
-                #path = list(self.relays.pop() for i in range(3))
-                #log.msg("Trying path %s" % '->'.join(map(lambda node:
-                #                                         node, path)))
-                self.relays['current'] = self.relays['all'].pop()
-                for circ in state.circuits.values():
-                    for node in circ.path:
-                        if node == self.relays['current']:
-                            self.relays['up'].append(self.relays['current'])
-                    if len(circ.path) < 3:
-                        try:
-                            ext = attacher_extend_circuit(state.attacher, circ,
-                                                          self.relays['current'])
-                            ext.addCallback(attacher_extend_circuit_done,
-                                            state.attacher, circ,
-                                            self.relays['current'])
-                        except Exception, e:
-                            log.err("Extend circuit failed: %s" % e)
-                    else:
-                        continue
-
-        #state.callback(all)
-        #self.reactor.run()
-        return state
-
-    def disabled_startTest(self, args):
-        """
-        Local override of :meth:`OONITest.startTest` to bypass calling
-        self.control.
-
-        :param args:
-            The current line of :class:`Asset`, not used but kept for
-            compatibility reasons.
-        :return:
-            A fired deferred which callbacks :meth:`experiment` and
-            :meth:`OONITest.finished`.
-        """
-        self.start_time = date.now()
-        self.d = self.experiment(args)
-        self.d.addErrback(log.err)
-        self.d.addCallbacks(self.finished, log.err)
-        return self.d
-
-## ISIS' NOTES
-## -----------
-## TODO:
-##       x  cleanup documentation
-##       x  add DataDirectory option
-##       x  check if bridges are public relays
-##       o  take bridge_desc file as input, also be able to give same
-##          format as output
-##       x  Add asynchronous timeout for deferred, so that we don't wait
-##       o  Add asynchronous timeout for deferred, so that we don't wait
-##          forever for bridges that don't work.
diff --git a/nettests/experimental/bridge_reachability/echo.py b/nettests/experimental/bridge_reachability/echo.py
deleted file mode 100644
index d4033dd..0000000
--- a/nettests/experimental/bridge_reachability/echo.py
+++ /dev/null
@@ -1,132 +0,0 @@
-#!/usr/bin/env python
-# -*- coding: utf-8 -*-
-#
-#  +---------+
-#  | echo.py |
-#  +---------+
-#     A simple ICMP-8 ping test.
-#
-# @authors: Isis Lovecruft, <isis@xxxxxxxxxxxxxx>
-# @version: 0.0.2-pre-alpha
-# @license: copyright (c) 2012 Isis Lovecruft
-#           see attached LICENCE file
-#
-
-import os
-import sys
-
-from twisted.python   import usage
-from twisted.internet import reactor, defer
-from ooni             import nettest
-from ooni.utils       import log, net, Storage, txscapy
-
-try:
-    from scapy.all             import IP, ICMP
-    from scapy.all             import sr1
-    from ooni.lib              import txscapy
-    from ooni.lib.txscapy      import txsr, txsend
-    from ooni.templates.scapyt import BaseScapyTest
-except:
-    log.msg("This test requires scapy, see www.secdev.org/projects/scapy")
-
-class UsageOptions(usage.Options):
-    optParameters = [
-        ['dst', 'd', None, 'Host IP to ping'],
-        ['file', 'f', None, 'File of list of IPs to ping'],
-        ['interface', 'i', None, 'Network interface to use'],
-        ['count', 'c', 1, 'Number of packets to send', int],
-        ['size', 's', 56, 'Number of bytes to send in ICMP data field', int],
-        ['ttl', 'l', 25, 'Set the IP Time to Live', int],
-        ['timeout', 't', 2, 'Seconds until timeout if no response', int],
-        ['pcap', 'p', None, 'Save pcap to this file'],
-        ['receive', 'r', True, 'Receive response packets']]
-
-class EchoTest(nettest.NetTestCase):
-    """
-    xxx fill me in
-    """
-    name         = 'echo'
-    author       = 'Isis Lovecruft <isis@xxxxxxxxxxxxxx>'
-    description  = 'A simple ping test to see if a host is reachable.'
-    version      = '0.0.2'
-    requiresRoot = True
-
-    usageOptions    = UsageOptions
-    #requiredOptions = ['dst']
-
-    def setUp(self, *a, **kw):
-        self.destinations = {}
-
-        if self.localOptions:
-            for key, value in self.localOptions.items():
-                log.debug("setting self.%s = %s" % (key, value))
-                setattr(self, key, value)
-
-        self.timeout *= 1000            ## convert to milliseconds
-
-        if not self.interface:
-            try:
-                iface = txscapy.getDefaultIface()
-            except Exception, e:
-                log.msg("No network interface specified!")
-                log.err(e)
-            else:
-                log.msg("Using system default interface: %s" % iface)
-                self.interface = iface
-
-        if self.pcap:
-            try:
-                self.pcapfile = open(self.pcap, 'a+')
-            except:
-                log.msg("Unable to write to pcap file %s" % self.pcap)
-            else:
-                self.pcap = net.capturePacket(self.pcapfile)
-
-        if not self.dst:
-            if self.file:
-                self.dstProcessor(self.file)
-                for key, value in self.destinations.items():
-                    for label, data in value.items():
-                        if not 'ans' in data:
-                            self.dst = label
-        else:
-            self.addDest(self.dst)
-        log.debug("self.dst is now: %s" % self.dst)
-
-        log.debug("Initialization of %s test completed." % self.name)
-
-    def addDest(self, dest):
-        d = dest.strip()
-        self.destinations[d] = {'dst_ip': d}
-
-    def dstProcessor(self, inputfile):
-        from ipaddr import IPAddress
-
-        if os.path.isfile(inputfile):
-            with open(inputfile) as f:
-                for line in f.readlines():
-                    if line.startswith('#'):
-                        continue
-                    self.addDest(line)
-
-    def test_icmp(self):
-        def process_response(echo_reply, dest):
-           ans, unans = echo_reply
-           if ans:
-               log.msg("Recieved echo reply from %s: %s" % (dest, ans))
-           else:
-               log.msg("No reply was received from %s. Possible censorship event." % dest)
-               log.debug("Unanswered packets: %s" % unans)
-           self.report[dest] = echo_reply
-
-        for label, data in self.destinations.items():
-            reply = sr1(IP(dst=lebal)/ICMP())
-            process = process_reponse(reply, label)
-
-        #(ans, unans) = ping
-        #self.destinations[self.dst].update({'ans': ans,
-        #                                    'unans': unans,
-        #                                    'response_packet': ping})
-        #return ping
-
-        #return reply
diff --git a/nettests/experimental/chinatrigger.py b/nettests/experimental/chinatrigger.py
deleted file mode 100644
index de1f64d..0000000
--- a/nettests/experimental/chinatrigger.py
+++ /dev/null
@@ -1,108 +0,0 @@
-import random
-import string
-import struct
-import time
-
-from twisted.python import usage
-from ooni.templates.scapyt import BaseScapyTest
-
class UsageOptions(usage.Options):
    # Command-line options for the chinatrigger test. Both values are
    # required (ChinaTriggerTest.requiredOptions enforces this); 'port' is
    # converted to int in ChinaTriggerTest.setUp.
    optParameters = [['dst', 'd', None, 'Specify the target address'],
                     ['port', 'p', None, 'Specify the target port']
                    ]
-
class ChinaTriggerTest(BaseScapyTest):
    """
    This test is a OONI based implementation of the C tool written
    by Philipp Winter to engage chinese probes in active scanning.

    It sends a TLS ClientHello with randomized SNI/time/random fields,
    followed by one single-byte mutation of that packet per byte position.

    Example of running it:
    ./bin/ooniprobe chinatrigger -d 127.0.0.1 -p 8080
    """

    name = "chinatrigger"
    usageOptions = UsageOptions
    requiredOptions = ['dst', 'port']
    timeout = 2

    def setUp(self):
        # Target host and port taken from the command line.
        self.dst = self.localOptions['dst']
        self.port = int(self.localOptions['port'])

    @staticmethod
    def set_random_servername(pkt):
        """Replace the 16-byte SNI field (offset 121) with random lowercase letters."""
        ret = pkt[:121]
        for i in range(16):
            ret += random.choice(string.ascii_lowercase)
        ret += pkt[121+16:]
        return ret

    @staticmethod
    def set_random_time(pkt):
        """Overwrite the 4-byte gmt_unix_time field (offset 11) with the current time."""
        ret = pkt[:11]
        ret += struct.pack('!I', int(time.time()))
        ret += pkt[11+4:]
        return ret

    @staticmethod
    def set_random_field(pkt):
        """Randomize the 28-byte ClientHello random field (offset 15)."""
        ret = pkt[:15]
        for i in range(28):
            ret += chr(random.randint(0, 255))
        ret += pkt[15+28:]
        return ret

    @staticmethod
    def mutate(pkt, idx):
        """
        Return a copy of *pkt* with the byte at *idx* replaced by a random
        different byte.

        Bugfix: the original built ``pkt[:idx-1] + mutation + pkt[idx:]``,
        which replaced the byte *before* idx (while comparing the mutation
        against pkt[idx]) and, for idx == 0, nearly doubled the packet via
        the pkt[:-1] slice. We now mutate exactly position idx.
        """
        mutation = chr(random.randint(0, 255))
        while mutation == pkt[idx]:
            mutation = chr(random.randint(0, 255))
        return pkt[:idx] + mutation + pkt[idx+1:]

    @staticmethod
    def set_all_random_fields(pkt):
        """Apply all three field randomizations to *pkt*."""
        pkt = ChinaTriggerTest.set_random_servername(pkt)
        pkt = ChinaTriggerTest.set_random_time(pkt)
        pkt = ChinaTriggerTest.set_random_field(pkt)
        return pkt

    def test_send_mutations(self):
        """Send the randomized ClientHello plus one mutation per byte position."""
        from scapy.all import IP, TCP
        # Captured TLS ClientHello used as the mutation template.
        pkt = "\x16\x03\x01\x00\xcc\x01\x00\x00\xc8"\
              "\x03\x01\x4f\x12\xe5\x63\x3f\xef\x7d"\
              "\x20\xb9\x94\xaa\x04\xb0\xc1\xd4\x8c"\
              "\x50\xcd\xe2\xf9\x2f\xa9\xfb\x78\xca"\
              "\x02\xa8\x73\xe7\x0e\xa8\xf9\x00\x00"\
              "\x3a\xc0\x0a\xc0\x14\x00\x39\x00\x38"\
              "\xc0\x0f\xc0\x05\x00\x35\xc0\x07\xc0"\
              "\x09\xc0\x11\xc0\x13\x00\x33\x00\x32"\
              "\xc0\x0c\xc0\x0e\xc0\x02\xc0\x04\x00"\
              "\x04\x00\x05\x00\x2f\xc0\x08\xc0\x12"\
              "\x00\x16\x00\x13\xc0\x0d\xc0\x03\xfe"\
              "\xff\x00\x0a\x00\xff\x01\x00\x00\x65"\
              "\x00\x00\x00\x1d\x00\x1b\x00\x00\x18"\
              "\x77\x77\x77\x2e\x67\x6e\x6c\x69\x67"\
              "\x78\x7a\x70\x79\x76\x6f\x35\x66\x76"\
              "\x6b\x64\x2e\x63\x6f\x6d\x00\x0b\x00"\
              "\x04\x03\x00\x01\x02\x00\x0a\x00\x34"\
              "\x00\x32\x00\x01\x00\x02\x00\x03\x00"\
              "\x04\x00\x05\x00\x06\x00\x07\x00\x08"\
              "\x00\x09\x00\x0a\x00\x0b\x00\x0c\x00"\
              "\x0d\x00\x0e\x00\x0f\x00\x10\x00\x11"\
              "\x00\x12\x00\x13\x00\x14\x00\x15\x00"\
              "\x16\x00\x17\x00\x18\x00\x19\x00\x23"\
              "\x00\x00"

        pkt = ChinaTriggerTest.set_all_random_fields(pkt)
        pkts = [IP(dst=self.dst)/TCP(dport=self.port)/pkt]
        for x in range(len(pkt)):
            mutation = IP(dst=self.dst)/TCP(dport=self.port)/ChinaTriggerTest.mutate(pkt, x)
            pkts.append(mutation)
        return self.sr(pkts, timeout=2)
-
diff --git a/nettests/experimental/dns_injection.py b/nettests/experimental/dns_injection.py
deleted file mode 100644
index 97233cf..0000000
--- a/nettests/experimental/dns_injection.py
+++ /dev/null
@@ -1,63 +0,0 @@
-# -*- encoding: utf-8 -*-
-from twisted.python import usage
-from twisted.internet import defer
-
-from ooni.templates import dnst
-from ooni import nettest
-from ooni.utils import log
-
class UsageOptions(usage.Options):
    # Command-line options for the DNS injection test. 8.8.8.1 is a
    # deliberately invalid resolver: any "answer" from it must be injected.
    optParameters = [
            ['resolver', 'r', '8.8.8.1', 'an invalid DNS resolver'],
            ['timeout', 't', 3, 'timeout after which we should consider the query failed']
    ]
-
class DNSInjectionTest(dnst.DNSTest):
    """
    This test detects DNS spoofed DNS responses by performing UDP based DNS
    queries towards an invalid DNS resolver.

    For it to work we must be traversing the network segment of a machine that
    is actively injecting DNS query answers.
    """
    name = "DNS Injection"
    description = "Checks for injection of spoofed DNS answers"
    version = "0.1"
    authors = "Arturo Filastò"

    inputFile = ['file', 'f', None,
                 'Input file of list of hostnames to attempt to resolve']

    usageOptions = UsageOptions
    requiredOptions = ['resolver', 'file']

    def setUp(self):
        # (host, port) pair for the deliberately invalid resolver.
        self.resolver = (self.localOptions['resolver'], 53)
        self.queryTimeout = [self.localOptions['timeout']]

    def inputProcessor(self, filename):
        """Yield one hostname per input line, stripping 'http://' URL noise.

        Bugfix: uses a context manager so the file handle is closed even if
        the consumer abandons the generator or a read fails (the original
        only closed it after full exhaustion).
        """
        with open(filename) as fp:
            for line in fp:
                if line.startswith('http://'):
                    yield line.replace('http://', '').replace('/', '').strip()
                else:
                    yield line.strip()

    def test_injection(self):
        """Report injected=True if the invalid resolver 'answers', False on timeout."""
        self.report['injected'] = None

        d = self.performALookup(self.input, self.resolver)

        @d.addCallback
        def cb(res):
            # Any answer from an invalid resolver must have been injected
            # by an on-path device.
            log.msg("The DNS query for %s is injected" % self.input)
            self.report['injected'] = True

        @d.addErrback
        def err(err):
            # A timeout is the expected, non-censored outcome.
            err.trap(defer.TimeoutError)
            log.msg("The DNS query for %s is not injected" % self.input)
            self.report['injected'] = False

        return d
-
diff --git a/nettests/experimental/domclass_collector.py b/nettests/experimental/domclass_collector.py
deleted file mode 100644
index c1866f2..0000000
--- a/nettests/experimental/domclass_collector.py
+++ /dev/null
@@ -1,33 +0,0 @@
-# -*- encoding: utf-8 -*-
-#
-# The purpose of this collector is to compute the eigenvector for the input
-# file containing a list of sites.
-#
-#
-# :authors: Arturo Filastò
-# :licence: see LICENSE
-
-from twisted.internet import threads, defer
-
-from ooni.kit import domclass
-from ooni.templates import httpt
-
class DOMClassCollector(httpt.HTTPTest):
    """Fetch each input URL and record the eigenvalues of its DOM structure."""

    name = "DOM class collector"
    author = "Arturo Filastò"
    version = 0.1

    followRedirects = True

    inputFile = ['file', 'f', None, 'The list of urls to build a domclass for']

    def test_collect(self):
        # Guard clause: refuse to run without an input URL.
        if not self.input:
            raise Exception("No input specified")
        return self.doRequest(self.input)

    def processResponseBody(self, body):
        # Store the DOM eigenvalues as a plain list so the report stays
        # serializable.
        spectrum = domclass.compute_eigenvalues_from_DOM(content=body)
        self.report['eigenvalues'] = spectrum.tolist()
diff --git a/nettests/experimental/http_filtering_bypassing.py b/nettests/experimental/http_filtering_bypassing.py
deleted file mode 100644
index dc103db..0000000
--- a/nettests/experimental/http_filtering_bypassing.py
+++ /dev/null
@@ -1,84 +0,0 @@
-# -*- encoding: utf-8 -*-
-from twisted.python import usage
-
-from ooni.utils import log
-from ooni.utils import randomStr, randomSTR
-from ooni.templates import tcpt
-
class UsageOptions(usage.Options):
    # Command-line options: address and port of the OONI TCP echo backend
    # that HTTPFilteringBypass sends its payloads to.
    optParameters = [['backend', 'b', '127.0.0.1',
                        'The OONI backend that runs a TCP echo server'],
                    ['backendport', 'p', 80, 'Specify the port that the TCP echo server is running (should only be set for debugging)']]
-
class HTTPFilteringBypass(tcpt.TCPTest):
    """
    Sends a series of (deliberately malformed) HTTP-like requests to a TCP
    echo backend and flags tampering whenever the echoed bytes differ from
    what was sent, indicating an inline device rewrote or dropped the
    request.

    NOTE(review): the payloads use '\\n\\r' rather than HTTP's canonical
    '\\r\\n'; this looks intentional (filter-evasion probing) — confirm
    before normalizing.
    """
    name = "HTTPFilteringBypass"
    version = "0.1"
    authors = "xx"

    inputFile = ['file', 'f', None,
            'Specify a list of hostnames to use as inputs']

    usageOptions = UsageOptions
    requiredOptions = ['backend']

    def setUp(self):
        # Echo backend address/port from the command line.
        self.port = int(self.localOptions['backendport'])
        self.address = self.localOptions['backend']

    def check_for_manipulation(self, response, payload):
        """Tampering means the echoed response differs from the payload sent."""
        log.debug("Checking if %s == %s" % (response, payload))
        if response != payload:
            self.report['tampering'] = True
        else:
            self.report['tampering'] = False

    def test_prepend_newline(self):
        """Leading newline before the request line."""
        payload = "\nGET / HTTP/1.1\n\r"
        payload += "Host: %s\n\r" % self.input

        d = self.sendPayload(payload)
        d.addCallback(self.check_for_manipulation, payload)
        return d

    def test_tab_trick(self):
        """Trailing tab after the Host value."""
        payload = "GET / HTTP/1.1\n\r"
        payload += "Host: %s\t\n\r" % self.input

        d = self.sendPayload(payload)
        d.addCallback(self.check_for_manipulation, payload)
        return d

    def test_subdomain_blocking(self):
        """Random subdomain prepended to the tested hostname.

        Bugfix: '%' binds tighter than '+', so the original interpolated
        only the random prefix into the Host header and appended
        '.<input>' *after* the line terminator; the whole hostname is now
        interpolated.
        """
        payload = "GET / HTTP/1.1\n\r"
        payload += "Host: %s\n\r" % (randomStr(10) + '.' + self.input)

        d = self.sendPayload(payload)
        d.addCallback(self.check_for_manipulation, payload)
        return d

    def test_fuzzy_domain_blocking(self):
        """Tested hostname embedded between two random DNS labels."""
        hostname_field = randomStr(10) + '.' + self.input + '.' + randomStr(10)
        payload = "GET / HTTP/1.1\n\r"
        payload += "Host: %s\n\r" % hostname_field

        d = self.sendPayload(payload)
        d.addCallback(self.check_for_manipulation, payload)
        return d

    def test_fuzzy_match_blocking(self):
        """Tested hostname embedded inside random characters (no dots)."""
        hostname_field = randomStr(10) + self.input + randomStr(10)
        payload = "GET / HTTP/1.1\n\r"
        payload += "Host: %s\n\r" % hostname_field

        d = self.sendPayload(payload)
        d.addCallback(self.check_for_manipulation, payload)
        return d

    def test_normal_request(self):
        """Baseline request with the plain hostname."""
        payload = "GET / HTTP/1.1\n\r"
        payload += "Host: %s\n\r" % self.input

        d = self.sendPayload(payload)
        d.addCallback(self.check_for_manipulation, payload)
        return d
-
diff --git a/nettests/experimental/http_keyword_filtering.py b/nettests/experimental/http_keyword_filtering.py
deleted file mode 100644
index 0ae9c52..0000000
--- a/nettests/experimental/http_keyword_filtering.py
+++ /dev/null
@@ -1,45 +0,0 @@
-# -*- encoding: utf-8 -*-
-#
-# :authors: Arturo Filastò
-# :licence: see LICENSE
-
-from twisted.python import usage
-
-from ooni.templates import httpt
-
class UsageOptions(usage.Options):
    # Command-line options: URL of the OONI HTTP backend the keyword
    # requests are sent to (required by HTTPKeywordFiltering).
    optParameters = [['backend', 'b', 'http://127.0.0.1:57001',
                        'URL of the test backend to use']]
-
class HTTPKeywordFiltering(httpt.HTTPTest):
    """
    Sends HTTP requests whose body contains a keyword that may be subject
    to censorship and records the backend's response.

    It does not detect censorship on the client, but just logs the response from the 
    HTTP backend server.
    """
    name = "HTTP Keyword Filtering"
    author = "Arturo Filastò"
    version = "0.1.1"

    inputFile = ['file', 'f', None, 'List of keywords to use for censorship testing']

    usageOptions = UsageOptions

    requiredOptions = ['backend']

    def _request_with_keyword(self, method):
        # Both probes differ only in the HTTP verb; the keyword under test
        # always travels in the request body.
        return self.doRequest(self.localOptions['backend'], method=method, body=self.input)

    def test_get(self):
        """
        Perform a HTTP GET request to the backend containing the keyword to be
        tested inside of the request body.
        """
        return self._request_with_keyword("GET")

    def test_post(self):
        """
        Perform a HTTP POST request to the backend containing the keyword to be
        tested inside of the request body.
        """
        return self._request_with_keyword("POST")
-
diff --git a/nettests/experimental/http_trix.py b/nettests/experimental/http_trix.py
deleted file mode 100644
index 85a4ba2..0000000
--- a/nettests/experimental/http_trix.py
+++ /dev/null
@@ -1,47 +0,0 @@
-# -*- encoding: utf-8 -*-
-from twisted.python import usage
-
-from ooni.utils import log
-from ooni.utils import randomStr, randomSTR
-from ooni.templates import tcpt
-
class UsageOptions(usage.Options):
    # Command-line options: address and port of the OONI TCP echo backend
    # that HTTPTrix sends its squid-specific payload to.
    optParameters = [['backend', 'b', '127.0.0.1',
                        'The OONI backend that runs a TCP echo server'],
                    ['backendport', 'p', 80, 'Specify the port that the TCP echo server is running (should only be set for debugging)']]
-
class HTTPTrix(tcpt.TCPTest):
    """Probe a TCP echo backend with squid-specific requests to expose a
    transparent proxy sitting on the path."""

    name = "HTTPTrix"
    version = "0.1"
    authors = "Arturo Filastò"

    usageOptions = UsageOptions
    requiredOptions = ['backend']

    def setUp(self):
        # Where the TCP echo backend lives.
        self.address = self.localOptions['backend']
        self.port = int(self.localOptions['backendport'])

    def check_for_manipulation(self, response, payload):
        """Mark the session as tampered when the echo differs from what was sent."""
        log.debug("Checking if %s == %s" % (response, payload))
        self.report['tampering'] = response != payload

    def test_for_squid_cache_object(self):
        """
        This detects the presence of a squid transparent HTTP proxy by sending
        a request for cache_object://localhost/info.

        This tests for the presence of a Squid Transparent proxy by sending:

            GET cache_object://localhost/info HTTP/1.1
        """
        payload = 'GET cache_object://localhost/info HTTP/1.1'
        payload += '\n\r'

        deferred = self.sendPayload(payload)
        deferred.addCallback(self.check_for_manipulation, payload)
        return deferred
-        return d
-
diff --git a/nettests/experimental/http_uk_mobile_networks.py b/nettests/experimental/http_uk_mobile_networks.py
deleted file mode 100644
index 784a9e9..0000000
--- a/nettests/experimental/http_uk_mobile_networks.py
+++ /dev/null
@@ -1,85 +0,0 @@
-# -*- encoding: utf-8 -*-
-import yaml
-
-from twisted.python import usage
-from twisted.plugin import IPlugin
-
-from ooni.templates import httpt
-from ooni.utils import log
-
class UsageOptions(usage.Options):
    """
    Command-line options: the YAML redirect-rules file. See
    https://github.com/hellais/ooni-inputs/processed/uk_mobile_networks_redirects.yaml
    for an example of what the rules file should look like.
    """
    optParameters = [
                     ['rules', 'y', None, 
                    'Specify the redirect rules file ']
                    ]
-
class HTTPUKMobileNetworksTest(httpt.HTTPTest):
    """
    This test was thought of by Open Rights Group and implemented with the
    purpose of detecting censorship in the UK.
    For more details on this test see:
    https://trac.torproject.org/projects/tor/ticket/6437
    XXX port the knowledge from the trac ticket into this test docstring
    """
    name = "HTTP UK mobile network redirect test"

    usageOptions = UsageOptions

    followRedirects = True

    inputFile = ['urls', 'f', None, 'List of urls one per line to test for censorship']
    requiredOptions = ['urls']

    def testPattern(self, value, pattern, type):
        """Return True/False if *value* matches *pattern* under match mode
        *type* ('eq' exact, 're' regex), or None for an unknown mode."""
        if type == 'eq':
            return value == pattern
        elif type == 're':
            import re
            return bool(re.match(pattern, value))
        return None

    def testPatterns(self, patterns, location):
        """OR together the per-pattern match results for *location*.

        NOTE(review): a None from testPattern (unknown pattern type) would
        make the |= below raise TypeError — confirm rule files only ever
        use 'eq'/'re'.
        Bugfix: dropped a stray, unused re-read of
        self.localOptions['rules'] that the original left in this method.
        """
        test_result = False
        if type(patterns) == list:
            for pattern in patterns:
                test_result |= self.testPattern(location, pattern['value'], pattern['type'])
        return test_result

    def testRules(self, rules, location):
        """Evaluate every redirect rule against *location*.

        Returns a dict of per-rule results plus an aggregate 'blocked' flag.
        """
        result = {}
        blocked = False
        for rule, value in rules.items():
            current_rule = {}
            current_rule['name'] = value['name']
            current_rule['patterns'] = value['patterns']
            current_rule['test'] = self.testPatterns(value['patterns'], location)
            blocked |= current_rule['test']
            result[rule] = current_rule
        result['blocked'] = blocked
        return result

    def processRedirect(self, location):
        """Load the YAML rules file and record whether *location* matches."""
        self.report['redirect'] = None
        rules_file = self.localOptions['rules']

        # Context manager guarantees the handle is closed even if the YAML
        # parse raises (the original closed it manually).
        with open(rules_file) as fp:
            rules = yaml.safe_load(fp)

        log.msg("Testing rules %s" % rules)
        redirect = self.testRules(rules, location)
        self.report['redirect'] = redirect
-
-
-
diff --git a/nettests/experimental/keyword_filtering.py b/nettests/experimental/keyword_filtering.py
deleted file mode 100644
index 9eec4ff..0000000
--- a/nettests/experimental/keyword_filtering.py
+++ /dev/null
@@ -1,52 +0,0 @@
-# -*- encoding: utf-8 -*-
-#
-# :authors: Arturo Filastò
-# :licence: see LICENSE
-
-from twisted.python import usage
-from twisted.internet import defer
-
-from ooni.utils import log
-from ooni.templates import scapyt
-
-from scapy.all import *
-
class UsageOptions(usage.Options):
    # Command-line options: "host:port" of the TCP echo backend and how
    # long to wait for injected RST packets before giving up.
    optParameters = [
                    ['backend', 'b', '127.0.0.1:57002', 'Test backend running TCP echo'],
                    ['timeout', 't', 5, 'Timeout after which to give up waiting for RST packets']
                    ]
-
class KeywordFiltering(scapyt.BaseScapyTest):
    """Detects keyword-based TCP filtering by watching for injected RST packets."""

    name = "Keyword Filtering detection based on RST packets"
    author = "Arturo Filastò"
    version = "0.1"

    usageOptions = UsageOptions

    inputFile = ['file', 'f', None, 
            'List of keywords to use for censorship testing']

    def test_tcp_keyword_filtering(self):
        """
        Places the keyword to be tested in the payload of a TCP packet.
        XXX need to implement bisection method for enumerating keywords.
            though this should not be an issue since we are testing all 
            the keywords in parallel.
        """
        # Bugfixes: the original unpacked the raw "host:port" option string
        # (ValueError: too many values) and referenced the undefined names
        # 'timeout' and 'port' (NameError). Parse the backend properly and
        # take the timeout from the parsed options.
        backend_ip, backend_port = self.localOptions['backend'].split(':')
        backend_port = int(backend_port)
        timeout = self.localOptions['timeout']

        def finished(packets):
            log.debug("Finished running TCP keyword filtering test on port %s" % backend_port)
            answered, unanswered = packets
            self.report['rst_packets'] = []
            for snd, rcv in answered:
                # flags == 4 means only the RST bit is set on the reply.
                if rcv[TCP].flags == 4:
                    self.report['rst_packets'].append(rcv)

        keyword_to_test = str(self.input)
        packets = IP(dst=backend_ip, id=RandShort())/TCP(dport=backend_port)/keyword_to_test
        d = self.sr(packets, timeout=timeout)
        d.addCallback(finished)
        return d
-
diff --git a/nettests/experimental/parasitictraceroute.py b/nettests/experimental/parasitictraceroute.py
deleted file mode 100644
index 631c24b..0000000
--- a/nettests/experimental/parasitictraceroute.py
+++ /dev/null
@@ -1,129 +0,0 @@
-# -*- encoding: utf-8 -*-
-#
-# :authors: Arturo Filastò
-# :licence: see LICENSE
-
-from twisted.python import usage
-from twisted.internet import defer
-
-from ooni.templates import scapyt
-
-from scapy.all import *
-
-from ooni.utils import log
-
class UsageOptions(usage.Options):
    # Command-line options for the parasitic traceroute: target backend,
    # timeout, TTL ceiling, and optional fixed destination/source ports.
    optParameters = [['backend', 'b', 'google.com', 'Test backend to use'],
                    ['timeout', 't', 5, 'The timeout for the traceroute test'],
                    ['maxttl', 'm', 64, 'The maximum value of ttl to set on packets'],
                    ['dstport', 'd', 80, 'Set the destination port of the traceroute test'],
                    ['srcport', 'p', None, 'Set the source port to a specific value']]
-
class ParasiticalTracerouteTest(scapyt.BaseScapyTest):
    """TCP traceroute piggybacked on an established TCP session."""

    name = "Parasitic TCP Traceroute Test"
    author = "Arturo Filastò"
    version = "0.1"

    usageOptions = UsageOptions

    def setUp(self):
        def get_sport():
            # Fixed source port if requested, otherwise a random ephemeral one.
            if self.localOptions['srcport']:
                return int(self.localOptions['srcport'])
            else:
                return random.randint(1024, 65535)
        self.get_sport = get_sport

        # First resolved address of the backend host.
        self.dst_ip = socket.gethostbyaddr(self.localOptions['backend'])[2][0]

        self.dport = int(self.localOptions['dstport'])
        self.max_ttl = int(self.localOptions['maxttl'])

    @defer.inlineCallbacks
    def test_parasitic_tcp_traceroute(self):
        """
        Establishes a TCP stream, then sequentially sends TCP packets with
        increasing TTL until we reach the ttl of the destination.

        Requires the backend to respond with an ACK to our SYN packet (i.e.
        the port must be open)

        XXX this currently does not work properly. The problem lies in the fact
        that we are currently using the scapy layer 3 socket. This socket makes
        packets received be trapped by the kernel TCP stack, therefore when we
        send out a SYN and get back a SYN-ACK the kernel stack will reply with
        a RST because it did not send a SYN.

        The quick fix to this would be to establish a TCP stream using socket
        calls and then "cannibalizing" the TCP session with scapy.

        The real fix is to make scapy use libpcap instead of raw sockets
        obviously as we previously did... arg.
        """
        sport = self.get_sport()
        dport = self.dport
        ipid = int(RandShort())

        ip_layer = IP(dst=self.dst_ip,
                id=ipid, ttl=self.max_ttl)

        syn = ip_layer/TCP(sport=sport, dport=dport, flags="S", seq=0)

        log.msg("Sending...")
        syn.show2()

        synack = yield self.sr1(syn)

        # Bugfix: the original called synack.show2() *before* this check,
        # raising AttributeError on a timeout (sr1 returns None).
        if not synack:
            log.err("Got no response. Try increasing max_ttl")
            return

        log.msg("Got response...")
        synack.show2()

        if synack[TCP].flags == 11:
            log.msg("Got back a FIN ACK. The destination port is closed")
            return

        elif synack[TCP].flags == 18:
            log.msg("Got a SYN ACK. All is well.")
        else:
            log.err("Got an unexpected result")
            return

        # synack.dport is our own source port reflected back by the peer.
        ack = ip_layer/TCP(sport=synack.dport,
                            dport=dport, flags="A",
                            seq=synack.ack, ack=synack.seq + 1)

        yield self.send(ack)

        self.report['hops'] = []
        # For the time being we make the assumption that we are NATted and
        # that the NAT will forward the packet to the destination even if the TTL has 
        for ttl in range(1, self.max_ttl):
            log.msg("Sending packet with ttl of %s" % ttl)
            ip_layer.ttl = ttl
            empty_tcp_packet = ip_layer/TCP(sport=synack.dport,
                    dport=dport, flags="A",
                    seq=synack.ack, ack=synack.seq + 1)

            answer = yield self.sr1(empty_tcp_packet)
            if not answer:
                log.err("Got no response for ttl %s" % ttl)
                continue

            try:
                # Raises IndexError when the reply carries no ICMP layer.
                icmp = answer[ICMP]
                report = {'ttl': empty_tcp_packet.ttl,
                    'address': answer.src,
                    'rtt': answer.time - empty_tcp_packet.time
                }
                log.msg("%s: %s" % (dport, report))
                self.report['hops'].append(report)

            except IndexError:
                if answer.src == self.dst_ip:
                    answer.show()
                    log.msg("Reached the destination. We have finished the traceroute")
                    return
-
diff --git a/nettests/experimental/squid.py b/nettests/experimental/squid.py
deleted file mode 100644
index 777bc3e..0000000
--- a/nettests/experimental/squid.py
+++ /dev/null
@@ -1,117 +0,0 @@
-# -*- encoding: utf-8 -*-
-#
-# Squid transparent HTTP proxy detector
-# *************************************
-#
-# :authors: Arturo Filastò
-# :licence: see LICENSE
-
-from ooni import utils
-from ooni.utils import log
-from ooni.templates import httpt
-
class SquidTest(httpt.HTTPTest):
    """
    This test aims at detecting the presence of a squid based transparent HTTP
    proxy. It also tries to detect the version number.
    """
    name = "Squid test"
    author = "Arturo Filastò"
    version = "0.1"

    optParameters = [['backend', 'b', 'http://ooni.nu/test/', 'Test backend to use']]

    #inputFile = ['urls', 'f', None, 'Urls file']
    inputs = ['http://google.com']

    def test_cacheobject(self):
        """
        This detects the presence of a squid transparent HTTP proxy by sending
        a request for cache_object://localhost/info.

        The response to this request will usually also contain the squid
        version number.
        """
        log.debug("Running")

        def process_body(body):
            if "Access Denied." in body:
                self.report['transparent_http_proxy'] = True
            else:
                self.report['transparent_http_proxy'] = False

        log.msg("Testing Squid proxy presence by sending a request for "\
                "cache_object")
        headers = {}
        # Bugfix: the original initialized the misspelled key
        # 'trans_http_proxy' here while process_body wrote
        # 'transparent_http_proxy'; the report now uses one key throughout.
        self.report['transparent_http_proxy'] = None
        method = "GET"
        body = "cache_object://localhost/info"
        return self.doRequest(self.localOptions['backend'], method=method, body=body,
                        headers=headers, body_processor=process_body)

    def test_search_bad_request(self):
        """
        Attempts to perform a request with a random invalid HTTP method.

        If we are being MITMed by a Transparent Squid HTTP proxy we will get
        back a response containing the X-Squid-Error header.
        """
        def process_headers(headers):
            log.debug("Processing headers in test_search_bad_request")
            # Bugfix: positive and negative branches originally wrote two
            # different report keys ('trans_http_proxy' vs
            # 'transparent_http_proxy'); unified on the latter.
            if 'X-Squid-Error' in headers:
                log.msg("Detected the presence of a transparent HTTP "\
                        "squid proxy")
                self.report['transparent_http_proxy'] = True
            else:
                log.msg("Did not detect the presence of transparent HTTP "\
                        "squid proxy")
                self.report['transparent_http_proxy'] = False

        log.msg("Testing Squid proxy presence by sending a random bad request")
        headers = {}
        method = utils.randomSTR(10, True)
        self.report['transparent_http_proxy'] = None
        return self.doRequest(self.localOptions['backend'], method=method,
                        headers=headers, headers_processor=process_headers)

    def test_squid_headers(self):
        """
        Detects the presence of a squid transparent HTTP proxy based on the
        response headers it adds to the responses to requests.
        """
        # Bugfix: this module never imported re, so the re.search below
        # raised NameError at runtime; import it locally.
        import re

        def process_headers(headers):
            """
            Checks if any of the headers that squid is known to add match the
            squid regexp.

            We are looking for something that looks like this:

                via: 1.0 cache_server:3128 (squid/2.6.STABLE21)
                x-cache: MISS from cache_server
                x-cache-lookup: MISS from cache_server:3128
            """
            squid_headers = {'via': r'.* \((squid.*)\)',
                        'x-cache': r'MISS from (\w+)',
                        'x-cache-lookup': r'MISS from (\w+:?\d+?)'
                        }

            self.report['transparent_http_proxy'] = False
            for key in squid_headers.keys():
                if key in headers:
                    log.debug("Found %s in headers" % key)
                    m = re.search(squid_headers[key], headers[key])
                    if m:
                        log.msg("Detected the presence of squid transparent"\
                                " HTTP Proxy")
                        self.report['transparent_http_proxy'] = True

        log.msg("Testing Squid proxy by looking at response headers")
        headers = {}
        method = "GET"
        self.report['transparent_http_proxy'] = None
        d = self.doRequest(self.localOptions['backend'], method=method,
                        headers=headers, headers_processor=process_headers)
        return d
-
-
diff --git a/nettests/manipulation/__init__.py b/nettests/manipulation/__init__.py
deleted file mode 100644
index e69de29..0000000
diff --git a/nettests/manipulation/captiveportal.py b/nettests/manipulation/captiveportal.py
deleted file mode 100644
index a0f8c6b..0000000
--- a/nettests/manipulation/captiveportal.py
+++ /dev/null
@@ -1,650 +0,0 @@
-# -*- coding: utf-8 -*-
-# captiveportal
-# *************
-#
-# This test is a collection of tests to detect the presence of a
-# captive portal. Code is taken, in part, from the old ooni-probe,
-# which was written by Jacob Appelbaum and Arturo Filastò.
-#
-# This module performs multiple tests that match specific vendor captive
-# portal tests. This is a basic internet captive portal filter tester written
-# for RECon 2011.
-#
-# Read the following URLs to understand the captive portal detection process
-# for various vendors:
-#
-# http://technet.microsoft.com/en-us/library/cc766017%28WS.10%29.aspx
-# http://blog.superuser.com/2011/05/16/windows-7-network-awareness/
-# http://isc.sans.org/diary.html?storyid=10312&;
-# http://src.chromium.org/viewvc/chrome?view=rev&revision=74608
-# http://code.google.com/p/chromium-os/issues/detail?3281ttp,
-# http://crbug.com/52489
-# http://crbug.com/71736
-# https://bugzilla.mozilla.org/show_bug.cgi?id=562917
-# https://bugzilla.mozilla.org/show_bug.cgi?id=603505
-# http://lists.w3.org/Archives/Public/ietf-http-wg/2011JanMar/0086.html
-# http://tools.ietf.org/html/draft-nottingham-http-portal-02
-#
-# :authors: Jacob Appelbaum, Arturo Filastò, Isis Lovecruft
-# :license: see LICENSE for more details
-
-import base64
-import os
-import random
-import re
-import string
-import urllib2
-from urlparse import urlparse
-
-from twisted.python import usage
-from twisted.internet import defer, threads
-
-from ooni import nettest
-from ooni.templates import httpt
-from ooni.utils import net
-from ooni.utils import log
-
-try:
-    from dns import resolver
-except ImportError:
-    print "The dnspython module was not found:"
-    print "See https://crate.io/packages/dnspython/";
-    resolver = None
-
-__plugoo__ = "captiveportal"
-__desc__ = "Captive portal detection test"
-
-class UsageOptions(usage.Options):
-    optParameters = [['asset', 'a', None, 'Asset file'],
-                 ['experiment-url', 'e', 'http://google.com/', 'Experiment URL'],
-                 ['user-agent', 'u', random.choice(net.userAgents),
-                  'User agent for HTTP requests']
-                ]
-
-class CaptivePortal(nettest.NetTestCase):
-    """
-    Compares content and status codes of HTTP responses, and attempts
-    to determine if content has been altered.
-    """
-
-    name = "captivep"
-    description = "Captive Portal Test"
-    version = '0.2'
-    author = "Isis Lovecruft"
-    usageOptions = UsageOptions
-
-    def http_fetch(self, url, headers={}):
-        """
-        Parses an HTTP url, fetches it, and returns a urllib2 response
-        object.
-        """
-        url = urlparse(url).geturl()
-        request = urllib2.Request(url, None, headers)
-        #XXX: HTTP Error 302: The HTTP server returned a redirect error that
-        #would lead to an infinite loop.  The last 30x error message was: Found
-        try:
-            response = urllib2.urlopen(request)
-            response_headers = dict(response.headers)
-            return response, response_headers
-        except urllib2.HTTPError, e:
-            log.err("HTTPError: %s" % e)
-            return None, None
-
-    def http_content_match_fuzzy_opt(self, experimental_url, control_result,
-                                     headers=None, fuzzy=False):
-        """
-        Makes an HTTP request on port 80 for experimental_url, then
-        compares the response_content of experimental_url with the
-        control_result. Optionally, if the fuzzy parameter is set to
-        True, the response_content is compared with a regex of the
-        control_result. If the response_content from the
-        experimental_url and the control_result match, returns True
-        with the HTTP status code and headers; False, status code, and
-        headers if otherwise.
-        """
-
-        if headers is None:
-            default_ua = self.local_options['user-agent']
-            headers = {'User-Agent': default_ua}
-
-        response, response_headers = self.http_fetch(experimental_url, headers)
-
-        response_content = response.read() if response else None
-        response_code = response.code if response else None
-        if response_content is None:
-            log.err("HTTP connection appears to have failed.")
-            return False, False, False
-
-        if fuzzy:
-            pattern = re.compile(control_result)
-            match = pattern.search(response_content)
-            log.msg("Fuzzy HTTP content comparison for experiment URL")
-            log.msg("'%s'" % experimental_url)
-            if not match:
-                log.msg("does not match!")
-                return False, response_code, response_headers
-            else:
-                log.msg("and the expected control result yielded a match.")
-                return True, response_code, response_headers
-        else:
-            if str(response_content) != str(control_result):
-                log.msg("HTTP content comparison of experiment URL")
-                log.msg("'%s'" % experimental_url)
-                log.msg("and the expected control result do not match.")
-                return False, response_code, response_headers
-            else:
-                return True, response_code, response_headers
-
-    def http_status_code_match(self, experiment_code, control_code):
-        """
-        Compare two HTTP status codes, returns True if they match.
-        """
-        return int(experiment_code) == int(control_code)
-
-    def http_status_code_no_match(self, experiment_code, control_code):
-        """
-        Compare two HTTP status codes, returns True if they do not match.
-        """
-        return int(experiment_code) != int(control_code)
-
-    def dns_resolve(self, hostname, nameserver=None):
-        """
-        Resolves hostname(s) though nameserver to corresponding
-        address(es). hostname may be either a single hostname string,
-        or a list of strings. If nameserver is not given, use local
-        DNS resolver, and if that fails try using 8.8.8.8.
-        """
-        if not resolver:
-            log.msg("dnspython is not installed.\
-                    Cannot perform DNS Resolve test")
-            return []
-        if isinstance(hostname, str):
-            hostname = [hostname]
-
-        if nameserver is not None:
-            res = resolver.Resolver(configure=False)
-            res.nameservers = [nameserver]
-        else:
-            res = resolver.Resolver()
-
-        response = []
-        answer = None
-
-        for hn in hostname:
-            try:
-                answer = res.query(hn)
-            except resolver.NoNameservers:
-                res.nameservers = ['8.8.8.8']
-                try:
-                    answer = res.query(hn)
-                except resolver.NXDOMAIN:
-                    log.msg("DNS resolution for %s returned NXDOMAIN" % hn)
-                    response.append('NXDOMAIN')
-            except resolver.NXDOMAIN:
-                log.msg("DNS resolution for %s returned NXDOMAIN" % hn)
-                response.append('NXDOMAIN')
-            finally:
-                if not answer:
-                    return response
-                for addr in answer:
-                    response.append(addr.address)
-        return response
-
-    def dns_resolve_match(self, experiment_hostname, control_address):
-        """
-        Resolve experiment_hostname, and check to see that it returns
-        an experiment_address which matches the control_address.  If
-        they match, returns True and experiment_address; otherwise
-        returns False and experiment_address.
-        """
-        experiment_address = self.dns_resolve(experiment_hostname)
-        if not experiment_address:
-            log.debug("dns_resolve() for %s failed" % experiment_hostname)
-            return None, experiment_address
-
-        if len(set(experiment_address) & set([control_address])) > 0:
-            return True, experiment_address
-        else:
-            log.msg("DNS comparison of control '%s' does not" % control_address)
-            log.msg("match experiment response '%s'" % experiment_address)
-            return False, experiment_address
-
-    def get_auth_nameservers(self, hostname):
-        """
-        Many CPs set a nameserver to be used. Let's query that
-        nameserver for the authoritative nameservers of hostname.
-
-        The equivalent of:
-        $ dig +short NS ooni.nu
-        """
-        if not resolver:
-            log.msg("dnspython not installed.")
-            log.msg("Cannot perform test.")
-            return []
-
-        res = resolver.Resolver()
-        answer = res.query(hostname, 'NS')
-        auth_nameservers = []
-        for auth in answer:
-            auth_nameservers.append(auth.to_text())
-        return auth_nameservers
-
-    def hostname_to_0x20(self, hostname):
-        """
-        MaKEs yOur HOsTnaME lOoK LiKE THis.
-
-        For more information, see:
-        D. Dagon, et. al. "Increased DNS Forgery Resistance
-        Through 0x20-Bit Encoding". Proc. CSS, 2008.
-        """
-        hostname_0x20 = ''
-        for char in hostname:
-            l33t = random.choice(['caps', 'nocaps'])
-            if l33t == 'caps':
-                hostname_0x20 += char.capitalize()
-            else:
-                hostname_0x20 += char.lower()
-        return hostname_0x20
-
-    def check_0x20_to_auth_ns(self, hostname, sample_size=None):
-        """
-        Resolve a 0x20 DNS request for hostname over hostname's
-        authoritative nameserver(s), and check to make sure that
-        the capitalization in the 0x20 request matches that of the
-        response. Also, check the serial numbers of the SOA (Start
-        of Authority) records on the authoritative nameservers to
-        make sure that they match.
-
-        If sample_size is given, a random sample equal to that number
-        of authoritative nameservers will be queried; default is 5.
-        """
-        log.msg("")
-        log.msg("Testing random capitalization of DNS queries...")
-        log.msg("Testing that Start of Authority serial numbers match...")
-
-        auth_nameservers = self.get_auth_nameservers(hostname)
-
-        if sample_size is None:
-            sample_size = 5
-            resolved_auth_ns = random.sample(self.dns_resolve(auth_nameservers),
-                                             sample_size)
-
-        querynames = []
-        answernames = []
-        serials = []
-
-        # Even when gevent monkey patching is on, the requests here
-        # are sent without being 0x20'd, so we need to 0x20 them.
-        hostname = self.hostname_to_0x20(hostname)
-
-        for auth_ns in resolved_auth_ns:
-            res = resolver.Resolver(configure=False)
-            res.nameservers = [auth_ns]
-            try:
-                answer = res.query(hostname, 'SOA')
-            except resolver.Timeout:
-                continue
-            querynames.append(answer.qname.to_text())
-            answernames.append(answer.rrset.name.to_text())
-            for soa in answer:
-                serials.append(str(soa.serial))
-
-        if len(set(querynames).intersection(answernames)) == 1:
-            log.msg("Capitalization in DNS queries and responses match.")
-            name_match = True
-        else:
-            log.msg("The random capitalization '%s' used in" % hostname)
-            log.msg("DNS queries to that hostname's authoritative")
-            log.msg("nameservers does not match the capitalization in")
-            log.msg("the response.")
-            name_match = False
-
-        if len(set(serials)) == 1:
-            log.msg("Start of Authority serial numbers all match.")
-            serial_match = True
-        else:
-            log.msg("Some SOA serial numbers did not match the rest!")
-            serial_match = False
-
-        ret = name_match, serial_match, querynames, answernames, serials
-
-        if name_match and serial_match:
-            log.msg("Your DNS queries do not appear to be tampered.")
-            return ret
-        elif name_match or serial_match:
-            log.msg("Something is tampering with your DNS queries.")
-            return ret
-        elif not name_match and not serial_match:
-            log.msg("Your DNS queries are definitely being tampered with.")
-            return ret
-
-    def get_random_url_safe_string(self, length):
-        """
-        Returns a random url-safe string of specified length, where
-        0 < length <= 256. The returned string will always start with
-        an alphabetic character.
-        """
-        if (length <= 0):
-            length = 1
-        elif (length > 256):
-            length = 256
-
-        random_ascii = base64.urlsafe_b64encode(os.urandom(int(length)))
-
-        while not random_ascii[:1].isalpha():
-            random_ascii = base64.urlsafe_b64encode(os.urandom(int(length)))
-
-        three_quarters = int((len(random_ascii)) * (3.0/4.0))
-        random_string = random_ascii[:three_quarters]
-        return random_string
-
-    def get_random_hostname(self, length=None):
-        """
-        Returns a random hostname with SLD of specified length. If
-        length is unspecified, length=32 is used.
-
-        These *should* all resolve to NXDOMAIN. If they actually
-        resolve to a box that isn't part of a captive portal that
-        would be rather interesting.
-        """
-        if length is None:
-            length = 32
-
-        random_sld = self.get_random_url_safe_string(length)
-
-        # if it doesn't start with a letter, chuck it.
-        while not random_sld[:1].isalpha():
-            random_sld = self.get_random_url_safe_string(length)
-
-        tld_list = ['.com', '.net', '.org', '.info', '.test', '.invalid']
-        random_tld = urllib2.random.choice(tld_list)
-        random_hostname = random_sld + random_tld
-        return random_hostname
-
-    def compare_random_hostnames(self, hostname_count=None, hostname_length=None):
-        """
-        Get hostname_count number of random hostnames with SLD length
-        of hostname_length, and then attempt DNS resolution. If no
-        arguments are given, default to three hostnames of 32 bytes
-        each. These random hostnames *should* resolve to NXDOMAIN,
-        except in the case where a user is presented with a captive
-        portal and remains unauthenticated, in which case the captive
-        portal may return the address of the authentication page.
-
-        If the cardinality of the intersection of the set of resolved
-        random hostnames and the single element control set
-        (['NXDOMAIN']) are equal to one, then DNS properly resolved.
-
-        Returns true if only NXDOMAINs were returned, otherwise returns
-        False with the relative complement of the control set in the
-        response set.
-        """
-        if hostname_count is None:
-            hostname_count = 3
-
-        log.msg("Generating random hostnames...")
-        log.msg("Resolving DNS for %d random hostnames..." % hostname_count)
-
-        control = ['NXDOMAIN']
-        responses = []
-
-        for x in range(hostname_count):
-            random_hostname = self.get_random_hostname(hostname_length)
-            response_match, response_address = self.dns_resolve_match(random_hostname,
-                                                                      control[0])
-            for address in response_address:
-                if response_match is False:
-                    log.msg("Strangely, DNS resolution of the random hostname")
-                    log.msg("%s actually points to %s"
-                             % (random_hostname, response_address))
-                    responses = responses + [address]
-                else:
-                    responses = responses + [address]
-
-        intersection = set(responses) & set(control)
-        relative_complement = set(responses) - set(control)
-        r = set(responses)
-
-        if len(intersection) == 1:
-            log.msg("All %d random hostnames properly resolved to NXDOMAIN."
-                     % hostname_count)
-            return True, relative_complement
-        elif (len(intersection) == 1) and (len(r) > 1):
-            log.msg("Something odd happened. Some random hostnames correctly")
-            log.msg("resolved to NXDOMAIN, but several others resolved to")
-            log.msg("to the following addresses: %s" % relative_complement)
-            return False, relative_complement
-        elif (len(intersection) == 0) and (len(r) == 1):
-            log.msg("All random hostnames resolved to the IP address ")
-            log.msg("'%s', which is indicative of a captive portal." % r)
-            return False, relative_complement
-        else:
-            log.debug("Apparently, pigs are flying on your network, 'cause a")
-            log.debug("bunch of hostnames made from 32-byte random strings")
-            log.debug("just magically resolved to a bunch of random addresses.")
-            log.debug("That is definitely highly improbable. In fact, my napkin")
-            log.debug("tells me that the probability of just one of those")
-            log.debug("hostnames resolving to an address is 1.68e-59, making")
-            log.debug("it nearly twice as unlikely as an MD5 hash collision.")
-            log.debug("Either someone is seriously messing with your network,")
-            log.debug("or else you are witnessing the impossible. %s" % r)
-            return False, relative_complement
-
-    def google_dns_cp_test(self):
-        """
-        Google Chrome resolves three 10-byte random hostnames.
-        """
-        subtest = "Google Chrome DNS-based"
-        log.msg("Running the Google Chrome DNS-based captive portal test...")
-
-        gmatch, google_dns_result = self.compare_random_hostnames(3, 10)
-
-        if gmatch:
-            log.msg("Google Chrome DNS-based captive portal test did not")
-            log.msg("detect a captive portal.")
-            return google_dns_result
-        else:
-            log.msg("Google Chrome DNS-based captive portal test believes")
-            log.msg("you are in a captive portal, or else something very")
-            log.msg("odd is happening with your DNS.")
-            return google_dns_result
-
-    def ms_dns_cp_test(self):
-        """
-        Microsoft "phones home" to a server which will always resolve
-        to the same address.
-        """
-        subtest = "Microsoft NCSI DNS-based"
-
-        log.msg("")
-        log.msg("Running the Microsoft NCSI DNS-based captive portal")
-        log.msg("test...")
-
-        msmatch, ms_dns_result = self.dns_resolve_match("dns.msftncsi.com",
-                                                        "131.107.255.255")
-        if msmatch:
-            log.msg("Microsoft NCSI DNS-based captive portal test did not")
-            log.msg("detect a captive portal.")
-            return ms_dns_result
-        else:
-            log.msg("Microsoft NCSI DNS-based captive portal test ")
-            log.msg("believes you are in a captive portal.")
-            return ms_dns_result
-
-    def run_vendor_dns_tests(self):
-        """
-        Run the vendor DNS tests.
-        """
-        report = {}
-        report['google_dns_cp'] = self.google_dns_cp_test()
-        report['ms_dns_cp'] = self.ms_dns_cp_test()
-
-        return report
-
-    def run_vendor_tests(self, *a, **kw):
-        """
-        These are several vendor tests used to detect the presence of
-        a captive portal. Each test compares HTTP status code and
-        content to the control results and has its own User-Agent
-        string, in order to emulate the test as it would occur on the
-        device it was intended for. Vendor tests are defined in the
-        format:
-        [exp_url, ctrl_result, ctrl_code, ua, test_name]
-        """
-
-        vendor_tests = [['http://www.apple.com/library/test/success.html',
-                         'Success',
-                         '200',
-                         'Mozilla/5.0 (iPhone; U; CPU like Mac OS X; en) AppleWebKit/420+ (KHTML, like Gecko) Version/3.0 Mobile/1A543a Safari/419.3',
-                         'Apple HTTP Captive Portal'],
-                        ['http://tools.ietf.org/html/draft-nottingham-http-portal-02',
-                         '428 Network Authentication Required',
-                         '428',
-                         'Mozilla/5.0 (Windows NT 6.1; rv:5.0) Gecko/20100101 Firefox/5.0',
-                         'W3 Captive Portal'],
-                        ['http://www.msftncsi.com/ncsi.txt',
-                         'Microsoft NCSI',
-                         '200',
-                         'Microsoft NCSI',
-                         'MS HTTP Captive Portal',]]
-
-        cm = self.http_content_match_fuzzy_opt
-        sm = self.http_status_code_match
-        snm = self.http_status_code_no_match
-
-        def compare_content(status_func, fuzzy, experiment_url, control_result,
-                            control_code, headers, test_name):
-            log.msg("")
-            log.msg("Running the %s test..." % test_name)
-
-            content_match, experiment_code, experiment_headers = cm(experiment_url,
-                                                                    control_result,
-                                                                    headers, fuzzy)
-            status_match = status_func(experiment_code, control_code)
-
-            if status_match and content_match:
-                log.msg("The %s test was unable to detect" % test_name)
-                log.msg("a captive portal.")
-                return True
-            else:
-                log.msg("The %s test shows that your network" % test_name)
-                log.msg("is filtered.")
-                return False
-
-        result = []
-        for vt in vendor_tests:
-            report = {}
-            report['vt'] = vt
-
-            experiment_url = vt[0]
-            control_result = vt[1]
-            control_code = vt[2]
-            headers = {'User-Agent': vt[3]}
-            test_name = vt[4]
-
-            args = (experiment_url, control_result, control_code, headers, test_name)
-
-            if test_name == "MS HTTP Captive Portal":
-                report['result'] = compare_content(sm, False, *args)
-
-            elif test_name == "Apple HTTP Captive Portal":
-                report['result'] = compare_content(sm, True, *args)
-
-            elif test_name == "W3 Captive Portal":
-                report['result'] = compare_content(snm, True, *args)
-
-            else:
-                log.err("Ooni is trying to run an undefined CP vendor test.")
-            result.append(report)
-        return result
-
-    def control(self, experiment_result, args):
-        """
-        Compares the content and status code of the HTTP response for
-        experiment_url with the control_result and control_code
-        respectively. If the status codes match, but the experimental
-        content and control_result do not match, fuzzy matching is enabled
-        to determine if the control_result is at least included somewhere
-        in the experimental content. Returns True if matches are found,
-        and False if otherwise.
-        """
-        # XXX put this back to being parametrized
-        #experiment_url = self.local_options['experiment-url']
-        experiment_url = 'http://google.com/'
-        control_result = 'XX'
-        control_code = 200
-        ua = self.local_options['user-agent']
-
-        cm = self.http_content_match_fuzzy_opt
-        sm = self.http_status_code_match
-        snm = self.http_status_code_no_match
-
-        log.msg("Running test for '%s'..." % experiment_url)
-        content_match, experiment_code, experiment_headers = cm(experiment_url,
-                                                                control_result)
-        status_match = sm(experiment_code, control_code)
-        if status_match and content_match:
-            log.msg("The test for '%s'" % experiment_url)
-            log.msg("was unable to detect a captive portal.")
-
-            self.report['result'] = True
-
-        elif status_match and not content_match:
-            log.msg("Retrying '%s' with fuzzy match enabled."
-                     % experiment_url)
-            fuzzy_match, experiment_code, experiment_headers = cm(experiment_url,
-                                                                  control_result,
-                                                                  fuzzy=True)
-            if fuzzy_match:
-                self.report['result'] = True
-            else:
-                log.msg("Found modified content on '%s'," % experiment_url)
-                log.msg("which could indicate a captive portal.")
-
-                self.report['result'] = False
-        else:
-            log.msg("The content comparison test for ")
-            log.msg("'%s'" % experiment_url)
-            log.msg("shows that your HTTP traffic is filtered.")
-
-            self.report['result'] = False
-
-    @defer.inlineCallbacks
-    def test_captive_portal(self):
-        """
-        Runs the CaptivePortal(Test).
-
-        CONFIG OPTIONS
-        --------------
-
-        If "do_captive_portal_vendor_tests" is set to "true", then vendor
-        specific captive portal HTTP-based tests will be run.
-
-        If "do_captive_portal_dns_tests" is set to "true", then vendor
-        specific captive portal DNS-based tests will be run.
-
-        If "check_dns_requests" is set to "true", then Ooni-probe will
-        attempt to check that your DNS requests are not being tampered with
-        by a captive portal.
-
-        If "captive_portal" = "yourfilename.txt", then user-specified tests
-        will be run.
-
-        Any combination of the above tests can be run.
-        """
-
-        log.msg("")
-        log.msg("Running vendor tests...")
-        self.report['vendor_tests'] = yield threads.deferToThread(self.run_vendor_tests)
-
-        log.msg("")
-        log.msg("Running vendor DNS-based tests...")
-        self.report['vendor_dns_tests'] = yield threads.deferToThread(self.run_vendor_dns_tests)
-
-        log.msg("")
-        log.msg("Checking that DNS requests are not being tampered...")
-        self.report['check0x20'] = yield threads.deferToThread(self.check_0x20_to_auth_ns, 'ooni.nu')
-
-        log.msg("")
-        log.msg("Captive portal test finished!")
-
diff --git a/nettests/manipulation/daphne.py b/nettests/manipulation/daphne.py
deleted file mode 100644
index 09279fa..0000000
--- a/nettests/manipulation/daphne.py
+++ /dev/null
@@ -1,119 +0,0 @@
-# -*- encoding: utf-8 -*-
-from twisted.python import usage
-from twisted.internet import protocol, endpoints, reactor
-
-from ooni import nettest
-from ooni.kit import daphn3
-from ooni.utils import log
-
-class Daphn3ClientProtocol(daphn3.Daphn3Protocol):
-    def nextStep(self):
-        log.debug("Moving on to next step in the state walk")
-        self.current_data_received = 0
-        if self.current_step >= (len(self.steps) - 1):
-            log.msg("Reached the end of the state machine")
-            log.msg("Censorship fingerpint bisected!")
-            step_idx, mutation_idx = self.factory.mutation
-            log.msg("step_idx: %s | mutation_id: %s" % (step_idx, mutation_idx))
-            #self.transport.loseConnection()
-            if self.report:
-                self.report['mutation_idx'] = mutation_idx
-                self.report['step_idx'] = step_idx
-            self.d.callback(None)
-            return
-        else:
-            self.current_step += 1
-        if self._current_step_role() == self.role:
-            # We need to send more data because we are again responsible for
-            # doing so.
-            self.sendPayload()
-
-
-class Daphn3ClientFactory(protocol.ClientFactory):
-    protocol = daphn3.Daphn3Protocol
-    mutation = [0,0]
-    steps = None
-
-    def buildProtocol(self, addr):
-        p = self.protocol()
-        p.steps = self.steps
-        p.factory = self
-        return p
-
-    def startedConnecting(self, connector):
-        log.msg("Started connecting %s" % connector)
-
-    def clientConnectionFailed(self, reason, connector):
-        log.err("We failed connecting the the OONIB")
-        log.err("Cannot perform test. Perhaps it got blocked?")
-        log.err("Please report this to tor-assistants@xxxxxxxxxxxxxx")
-
-    def clientConnectionLost(self, reason, connector):
-        log.err("Daphn3 client connection lost")
-        print reason
-
-class daphn3Args(usage.Options):
-    optParameters = [
-                     ['host', 'h', '127.0.0.1', 'Target Hostname'],
-                     ['port', 'p', 57003, 'Target port number']]
-
-    optFlags = [['pcap', 'c', 'Specify that the input file is a pcap file'],
-                ['yaml', 'y', 'Specify that the input file is a YAML file (default)']]
-
-class daphn3Test(nettest.NetTestCase):
-
-    name = "Daphn3"
-    usageOptions = daphn3Args
-    inputFile = ['file', 'f', None, 
-            'Specify the pcap or YAML file to be used as input to the test']
-
-    #requiredOptions = ['file']
-
-    steps = None
-
-    def inputProcessor(self, filename):
-        """
-        step_idx is the step in the packet exchange
-        ex.
-        [.X.] are packets sent by a client or a server
-
-            client:  [.1.]        [.3.] [.4.]
-            server:         [.2.]             [.5.]
-
-        mutation_idx: is the sub index of the packet as in the byte of the
-        packet at the step_idx that is to be mutated
-
-        """
-        if self.localOptions['pcap']:
-            daphn3Steps = daphn3.read_pcap(filename)
-        else:
-            daphn3Steps = daphn3.read_yaml(filename)
-        log.debug("Loaded these steps %s" % daphn3Steps)
-        yield daphn3Steps
-
-    def test_daphn3(self):
-        host = self.localOptions['host']
-        port = int(self.localOptions['port'])
-
-        def failure(failure):
-            log.msg("Failed to connect")
-            self.report['censored'] = True
-            self.report['mutation'] = 0
-            raise Exception("Error in connection, perhaps the backend is censored")
-            return
-
-        def success(protocol):
-            log.msg("Successfully connected")
-            protocol.sendPayload()
-            return protocol.d
-
-        log.msg("Connecting to %s:%s" % (host, port))
-        endpoint = endpoints.TCP4ClientEndpoint(reactor, host, port)
-        daphn3_factory = Daphn3ClientFactory()
-        daphn3_factory.steps = self.input
-        daphn3_factory.report = self.report
-        d = endpoint.connect(daphn3_factory)
-        d.addErrback(failure)
-        d.addCallback(success)
-        return d
-
diff --git a/nettests/manipulation/dnsspoof.py b/nettests/manipulation/dnsspoof.py
deleted file mode 100644
index 5c50c2f..0000000
--- a/nettests/manipulation/dnsspoof.py
+++ /dev/null
@@ -1,69 +0,0 @@
-from twisted.internet import defer
-from twisted.python import usage
-
-from scapy.all import IP, UDP, DNS, DNSQR
-
-from ooni.templates import scapyt
-from ooni.utils import log
-
-class UsageOptions(usage.Options):
-    optParameters = [['resolver', 'r', None,
-                    'Specify the resolver that should be used for DNS queries (ip:port)'],
-                    ['hostname', 'h', None,
-                        'Specify the hostname of a censored site'],
-                    ['backend', 'b', '8.8.8.8:53',
-                        'Specify the IP address of a good DNS resolver (ip:port)']
-                    ]
-
-
-class DNSSpoof(scapyt.ScapyTest):
-    name = "DNS Spoof"
-    timeout = 2
-
-    usageOptions = UsageOptions
-
-    requiredOptions = ['hostname', 'resolver']
-
-    def setUp(self):
-        self.resolverAddr, self.resolverPort = self.localOptions['resolver'].split(':')
-        self.resolverPort = int(self.resolverPort)
-
-        self.controlResolverAddr, self.controlResolverPort = self.localOptions['backend'].split(':')
-        self.controlResolverPort = int(self.controlResolverPort)
-
-        self.hostname = self.localOptions['hostname']
-
-    def postProcessor(self, report):
-        """
-        This is not tested, but the concept is that if the two responses
-        match up then spoofing is occurring.
-        """
-        try:
-            test_answer = report['test_a_lookup']['answered_packets'][0][1]
-            control_answer = report['test_control_a_lookup']['answered_packets'][0][1]
-        except IndexError:
-            self.report['spoofing'] = 'no_answer'
-            return
-
-        if test_answer[UDP] == control_answer[UDP]:
-                self.report['spoofing'] = True
-        else:
-            self.report['spoofing'] = False
-        return
-
-    @defer.inlineCallbacks
-    def test_a_lookup(self):
-        question = IP(dst=self.resolverAddr)/UDP()/DNS(rd=1,
-                qd=DNSQR(qtype="A", qclass="IN", qname=self.hostname))
-        log.msg("Performing query to %s with %s:%s" % (self.hostname, self.resolverAddr, self.resolverPort))
-        yield self.sr1(question)
-
-    @defer.inlineCallbacks
-    def test_control_a_lookup(self):
-        question = IP(dst=self.controlResolverAddr)/UDP()/DNS(rd=1,
-                qd=DNSQR(qtype="A", qclass="IN", qname=self.hostname))
-        log.msg("Performing query to %s with %s:%s" % (self.hostname,
-            self.controlResolverAddr, self.controlResolverPort))
-        yield self.sr1(question)
-
-
diff --git a/nettests/manipulation/http_header_field_manipulation.py b/nettests/manipulation/http_header_field_manipulation.py
deleted file mode 100644
index 509f4ef..0000000
--- a/nettests/manipulation/http_header_field_manipulation.py
+++ /dev/null
@@ -1,189 +0,0 @@
-# -*- encoding: utf-8 -*-
-#
-# :authors: Arturo Filastò
-# :licence: see LICENSE
-
-import random
-import json
-import yaml
-
-from twisted.python import usage
-
-from ooni.utils import log, net, randomStr
-from ooni.templates import httpt
-from ooni.utils.txagentwithsocks import TrueHeaders
-
-def random_capitalization(string):
-    output = ""
-    original_string = string
-    string = string.swapcase()
-    for i in range(len(string)):
-        if random.randint(0, 1):
-            output += string[i].swapcase()
-        else:
-            output += string[i]
-    if original_string == output:
-        return random_capitalization(output)
-    else:
-        return output
-
-class UsageOptions(usage.Options):
-    optParameters = [
-            ['backend', 'b', 'http://127.0.0.1:57001', 
-                'URL of the backend to use for sending the requests'],
-            ['headers', 'h', None,
-                'Specify a yaml formatted file from which to read the request headers to send']
-            ]
-
-class HTTPHeaderFieldManipulation(httpt.HTTPTest):
-    """
-    It performs HTTP requests with request headers that vary capitalization
-    towards a backend. If the headers reported by the server differ from
-    the ones we sent, then we have detected tampering.
-    """
-    name = "HTTP Header Field Manipulation"
-    author = "Arturo Filastò"
-    version = "0.1.3"
-
-    randomizeUA = False
-    usageOptions = UsageOptions
-
-    requiredOptions = ['backend']
-
-    def get_headers(self):
-        headers = {}
-        if self.localOptions['headers']:
-            try:
-                f = open(self.localOptions['headers'])
-            except IOError:
-                raise Exception("Specified input file does not exist")
-            content = ''.join(f.readlines())
-            f.close()
-            headers = yaml.safe_load(content)
-            return headers
-        else:
-            # XXX generate these from a random choice taken from whatheaders.com
-            # http://s3.amazonaws.com/data.whatheaders.com/whatheaders-latest.xml.zip
-            headers = {"User-Agent": [random.choice(net.userAgents)],
-                "Accept": ["text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8"],
-                "Accept-Encoding": ["gzip,deflate,sdch"],
-                "Accept-Language": ["en-US,en;q=0.8"],
-                "Accept-Charset": ["ISO-8859-1,utf-8;q=0.7,*;q=0.3"],
-                "Host": [randomStr(15)+'.com']
-            }
-            return headers
-
-    def get_random_caps_headers(self):
-        headers = {}
-        normal_headers = self.get_headers()
-        for k, v in normal_headers.items():
-            new_key = random_capitalization(k)
-            headers[new_key] = v
-        return headers
-
-    def processInputs(self):
-        if self.localOptions['backend']:
-            self.url = self.localOptions['backend']
-        else:
-            raise Exception("No backend specified")
-
-    def processResponseBody(self, data):
-        self.check_for_tampering(data)
-
-    def check_for_tampering(self, data):
-        """
-        Here we do checks to verify if the request we made has been tampered
-        with. We have 3 categories of tampering:
-
-        *  **total** when the response is not a json object and therefore we were not
-        able to reach the ooniprobe test backend
-
-        *  **request_line_capitalization** when the HTTP Request line (e.x. GET /
-        HTTP/1.1) does not match the capitalization we set.
-
-        *  **header_field_number** when the number of headers we sent does not match
-        with the ones the backend received
-
-        *  **header_name_capitalization** when the header field names do not match
-        those that we sent.
-
-        *  **header_field_value** when the header field value does not match with the
-        one we transmitted.
-        """
-        log.msg("Checking for tampering on %s" % self.url)
-
-        self.report['tampering'] = {
-            'total': False,
-            'request_line_capitalization': False,
-            'header_name_capitalization': False,
-            'header_field_value': False,
-            'header_field_number': False
-        }
-        try:
-            response = json.loads(data)
-        except ValueError:
-            self.report['tampering']['total'] = True
-            return
-
-        request_request_line = "%s / HTTP/1.1" % self.request_method
-
-        try:
-            response_request_line = response['request_line']
-            response_headers_dict = response['headers_dict']
-        except KeyError:
-            self.report['tampering']['total'] = True
-            return
-
-        if request_request_line != response_request_line:
-            self.report['tampering']['request_line_capitalization'] = True
-
-        request_headers = TrueHeaders(self.request_headers)
-        diff = request_headers.getDiff(TrueHeaders(response_headers_dict),
-                ignore=['Connection'])
-        if diff:
-            self.report['tampering']['header_field_name'] = True
-        else:
-            self.report['tampering']['header_field_name'] = False
-        self.report['tampering']['header_name_diff'] = list(diff)
-        log.msg("    total: %(total)s" % self.report['tampering'])
-        log.msg("    request_line_capitalization: %(request_line_capitalization)s" % self.report['tampering'])
-        log.msg("    header_name_capitalization: %(header_name_capitalization)s" % self.report['tampering'])
-        log.msg("    header_field_value: %(header_field_value)s" % self.report['tampering'])
-        log.msg("    header_field_number: %(header_field_number)s" % self.report['tampering'])
-
-    def test_get(self):
-        self.request_method = "GET"
-        self.request_headers = self.get_random_caps_headers()
-        return self.doRequest(self.url, self.request_method,
-                headers=self.request_headers)
-
-    def test_get_random_capitalization(self):
-        self.request_method = random_capitalization("GET")
-        self.request_headers = self.get_random_caps_headers()
-        return self.doRequest(self.url, self.request_method,
-                headers=self.request_headers)
-
-    def test_post(self):
-        self.request_method = "POST"
-        self.request_headers = self.get_headers()
-        return self.doRequest(self.url, self.request_method,
-                headers=self.request_headers)
-
-    def test_post_random_capitalization(self):
-        self.request_method = random_capitalization("POST")
-        self.request_headers = self.get_random_caps_headers()
-        return self.doRequest(self.url, self.request_method,
-                headers=self.request_headers)
-
-    def test_put(self):
-        self.request_method = "PUT"
-        self.request_headers = self.get_headers()
-        return self.doRequest(self.url, self.request_method,
-                headers=self.request_headers)
-
-    def test_put_random_capitalization(self):
-        self.request_method = random_capitalization("PUT")
-        self.request_headers = self.get_random_caps_headers()
-        return self.doRequest(self.url, self.request_method,
-                headers=self.request_headers)
-
diff --git a/nettests/manipulation/http_host.py b/nettests/manipulation/http_host.py
deleted file mode 100644
index d95d836..0000000
--- a/nettests/manipulation/http_host.py
+++ /dev/null
@@ -1,141 +0,0 @@
-# -*- encoding: utf-8 -*-
-#
-# HTTP Host Test
-# **************
-#
-# :authors: Arturo Filastò
-# :licence: see LICENSE
-
-import json
-from twisted.python import usage
-
-from ooni.utils import randomStr, randomSTR
-
-from ooni.utils import log
-from ooni.templates import httpt
-
-class UsageOptions(usage.Options):
-    optParameters = [['backend', 'b', 'http://127.0.0.1:57001',
-                      'URL of the test backend to use. Should be \
-                              listening on port 80 and be a \
-                              HTTPReturnJSONHeadersHelper'],
-                     ['content', 'c', None, 'The file to read \
-                            from containing the content of a block page']]
-
-class HTTPHost(httpt.HTTPTest):
-    """
-    This test is aimed at detecting the presence of a transparent HTTP proxy
-    and enumerating the sites that are being censored by it.
-
-    It places inside of the Host header field the hostname of the site that is
-    to be tested for censorship and then determines if the probe is behind a
-    transparent HTTP proxy (because the response from the backend server does
-    not match) and if the site is censored, by checking if the page that it
-    got back matches the input block page.
-    """
-    name = "HTTP Host"
-    author = "Arturo Filastò"
-    version = "0.2.3"
-
-    randomizeUA = False
-    usageOptions = UsageOptions
-
-    inputFile = ['file', 'f', None,
-            'List of hostnames to test for censorship']
-
-    requiredOptions = ['backend']
-
-    def test_filtering_prepend_newline_to_method(self):
-        headers = {}
-        headers["Host"] = [self.input]
-        return self.doRequest(self.localOptions['backend'], method="\nGET",
-                headers=headers)
-
-    def test_filtering_add_tab_to_host(self):
-        headers = {}
-        headers["Host"] = [self.input + '\t']
-        return self.doRequest(self.localOptions['backend'],
-                headers=headers)
-
-    def test_filtering_of_subdomain(self):
-        headers = {}
-        headers["Host"] = [randomStr(10) + '.' + self.input]
-        return self.doRequest(self.localOptions['backend'],
-                headers=headers)
-
-    def test_filtering_via_fuzzy_matching(self):
-        headers = {}
-        headers["Host"] = [randomStr(10) + self.input + randomStr(10)]
-        return self.doRequest(self.localOptions['backend'],
-                headers=headers)
-
-    def test_send_host_header(self):
-        """
-        Stuffs the HTTP Host header field with the site to be tested for
-        censorship and does an HTTP request of this kind to our backend.
-
-        We randomize the HTTP User Agent headers.
-        """
-        headers = {}
-        headers["Host"] = [self.input]
-        return self.doRequest(self.localOptions['backend'],
-                headers=headers)
-
-    def check_for_censorship(self, body):
-        """
-        If we have specified what a censorship page looks like here we will
-        check if the page we are looking at matches it.
-
-        XXX this is not tested, though it is basically what was used to detect
-        censorship in the palestine case.
-        """
-        if self.localOptions['content']:
-            self.report['censored'] = True
-            censorship_page = open(self.localOptions['content'])
-            response_page = iter(body.split("\n"))
-
-            for censorship_line in censorship_page.xreadlines():
-                response_line = response_page.next()
-                if response_line != censorship_line:
-                    self.report['censored'] = False
-                    break
-
-            censorship_page.close()
-        else:
-            self.report['censored'] = None
-
-    def processResponseBody(self, body):
-        """
-        XXX this is to be filled in with either a domclass based classified or
-        with a rule that will allow to detect that the body of the result is
-        that of a censored site.
-        """
-        # If we don't see a json array we know that something is wrong for
-        # sure
-        if not body.startswith("{"):
-            log.msg("This does not appear to be JSON")
-            self.report['transparent_http_proxy'] = True
-            self.check_for_censorship(body)
-            return
-        try:
-            content = json.loads(body)
-        except:
-            log.msg("The json does not parse, this is not what we expected")
-            self.report['transparent_http_proxy'] = True
-            self.check_for_censorship(body)
-            return
-
-        # We base the determination of the presence of a transparent HTTP
-        # proxy on the basis of the response containing the json that is to be
-        # returned by a HTTP Request Test Helper
-        if 'request_headers' in content and \
-                'request_line' in content and \
-                'headers_dict' in content:
-            log.msg("Found the keys I expected in %s" % content)
-            self.report['transparent_http_proxy'] = False
-            self.report['censored'] = False
-        else:
-            log.msg("Did not find the keys I expected in %s" % content)
-            self.report['transparent_http_proxy'] = True
-            self.check_for_censorship(body)
-
diff --git a/nettests/manipulation/http_invalid_request_line.py b/nettests/manipulation/http_invalid_request_line.py
deleted file mode 100644
index 2482282..0000000
--- a/nettests/manipulation/http_invalid_request_line.py
+++ /dev/null
@@ -1,106 +0,0 @@
-# -*- encoding: utf-8 -*-
-from twisted.python import usage
-
-from ooni.utils import log
-from ooni.utils import randomStr, randomSTR
-from ooni.templates import tcpt
-
-class UsageOptions(usage.Options):
-    optParameters = [['backend', 'b', '127.0.0.1',
-                        'The OONI backend that runs a TCP echo server'],
-                    ['backendport', 'p', 80, 'Specify the port that the TCP echo server is running (should only be set for debugging)']]
-
-class HTTPInvalidRequestLine(tcpt.TCPTest):
-    """
-    The goal of this test is to do some very basic and not very noisy fuzzing
-    on the HTTP request line. We generate a series of requests that are not
-    valid HTTP requests.
-
-    Unless elsewhere stated 'Xx'*N refers to N*2 random upper or lowercase
-    ascii letters or numbers ('XxXx' will be 4).
-    """
-    name = "HTTP Invalid Request Line"
-    version = "0.1.4"
-    authors = "Arturo Filastò"
-
-    usageOptions = UsageOptions
-    requiredOptions = ['backend']
-
-    def setUp(self):
-        self.port = int(self.localOptions['backendport'])
-        self.address = self.localOptions['backend']
-
-    def check_for_manipulation(self, response, payload):
-        log.debug("Checking if %s == %s" % (response, payload))
-        if response != payload:
-            self.report['tampering'] = True
-        else:
-            self.report['tampering'] = False
-
-    def test_random_invalid_method(self):
-        """
-        We test sending data to a TCP echo server listening on port 80, if what
-        we get back is not what we have sent then there is tampering going on.
-        This is for example what squid will return when performing such
-        request:
-
-            HTTP/1.0 400 Bad Request
-            Server: squid/2.6.STABLE21
-            Date: Sat, 23 Jul 2011 02:22:44 GMT
-            Content-Type: text/html
-            Content-Length: 1178
-            Expires: Sat, 23 Jul 2011 02:22:44 GMT
-            X-Squid-Error: ERR_INVALID_REQ 0
-            X-Cache: MISS from cache_server
-            X-Cache-Lookup: NONE from cache_server:3128
-            Via: 1.0 cache_server:3128 (squid/2.6.STABLE21)
-            Proxy-Connection: close
-
-        """
-        payload = randomSTR(4) + " / HTTP/1.1\n\r"
-
-        d = self.sendPayload(payload)
-        d.addCallback(self.check_for_manipulation, payload)
-        return d
-
-    def test_random_invalid_field_count(self):
-        """
-        This generates a request that looks like this:
-
-        XxXxX XxXxX XxXxX XxXxX
-
-        This may trigger some bugs in the HTTP parsers of transparent HTTP
-        proxies.
-        """
-        payload = ' '.join(randomStr(5) for x in range(4))
-        payload += "\n\r"
-
-        d = self.sendPayload(payload)
-        d.addCallback(self.check_for_manipulation, payload)
-        return d
-
-    def test_random_big_request_method(self):
-        """
-        This generates a request that looks like this:
-
-        Xx*512 / HTTP/1.1
-        """
-        payload = randomStr(1024) + ' / HTTP/1.1\n\r'
-
-        d = self.sendPayload(payload)
-        d.addCallback(self.check_for_manipulation, payload)
-        return d
-
-    def test_random_invalid_version_number(self):
-        """
-        This generates a request that looks like this:
-
-        GET / HTTP/XxX
-        """
-        payload = 'GET / HTTP/' + randomStr(3)
-        payload += '\n\r'
-
-        d = self.sendPayload(payload)
-        d.addCallback(self.check_for_manipulation, payload)
-        return d
-
diff --git a/nettests/manipulation/traceroute.py b/nettests/manipulation/traceroute.py
deleted file mode 100644
index 3f6f17b..0000000
--- a/nettests/manipulation/traceroute.py
+++ /dev/null
@@ -1,143 +0,0 @@
-# -*- encoding: utf-8 -*-
-#
-# :authors: Arturo Filastò
-# :licence: see LICENSE
-
-from twisted.python import usage
-from twisted.internet import defer
-
-from ooni.templates import scapyt
-
-from scapy.all import *
-
-from ooni.utils import log
-
-class UsageOptions(usage.Options):
-    optParameters = [
-                    ['backend', 'b', '8.8.8.8', 'Test backend to use'],
-                    ['timeout', 't', 5, 'The timeout for the traceroute test'],
-                    ['maxttl', 'm', 30, 'The maximum value of ttl to set on packets'],
-                    ['srcport', 'p', None, 'Set the source port to a specific value (only applies to TCP and UDP)']
-                    ]
-
-class TracerouteTest(scapyt.BaseScapyTest):
-    name = "Multi Protocol Traceroute Test"
-    author = "Arturo Filastò"
-    version = "0.1.1"
-
-    usageOptions = UsageOptions
-    dst_ports = [0, 22, 23, 53, 80, 123, 443, 8080, 65535]
-
-    def setUp(self):
-        def get_sport(protocol):
-            if self.localOptions['srcport']:
-                return int(self.localOptions['srcport'])
-            else:
-                return random.randint(1024, 65535)
-
-        self.get_sport = get_sport
-
-    def max_ttl_and_timeout(self):
-        max_ttl = int(self.localOptions['maxttl'])
-        timeout = int(self.localOptions['timeout'])
-        self.report['max_ttl'] = max_ttl
-        self.report['timeout'] = timeout
-        return max_ttl, timeout
-
-
-    def postProcessor(self, report):
-        tcp_hops = report['test_tcp_traceroute']
-        udp_hops = report['test_udp_traceroute']
-        icmp_hops = report['test_icmp_traceroute']
-
-
-    def test_tcp_traceroute(self):
-        """
-        Does a traceroute to the destination by sending TCP SYN packets
-        with TTLs from 1 until max_ttl.
-        """
-        def finished(packets, port):
-            log.debug("Finished running TCP traceroute test on port %s" % port)
-            answered, unanswered = packets
-            self.report['hops_'+str(port)] = []
-            for snd, rcv in answered:
-                try:
-                    sport = snd[UDP].sport
-                except IndexError:
-                    log.err("Source port for this traceroute was not found. This is probably a bug")
-                    sport = -1
-
-                report = {'ttl': snd.ttl,
-                        'address': rcv.src,
-                        'rtt': rcv.time - snd.time,
-                        'sport': sport
-                }
-                log.debug("%s: %s" % (port, report))
-                self.report['hops_'+str(port)].append(report)
-
-        dl = []
-        max_ttl, timeout = self.max_ttl_and_timeout()
-        for port in self.dst_ports:
-            packets = IP(dst=self.localOptions['backend'],
-                    ttl=(1,max_ttl),id=RandShort())/TCP(flags=0x2, dport=port,
-                            sport=self.get_sport('tcp'))
-
-            d = self.sr(packets, timeout=timeout)
-            d.addCallback(finished, port)
-            dl.append(d)
-        return defer.DeferredList(dl)
-
-    def test_udp_traceroute(self):
-        """
-        Does a traceroute to the destination by sending UDP packets with empty
-        payloads with TTLs from 1 until max_ttl.
-        """
-        def finished(packets, port):
-            log.debug("Finished running UDP traceroute test on port %s" % port)
-            answered, unanswered = packets
-            self.report['hops_'+str(port)] = []
-            for snd, rcv in answered:
-                report = {'ttl': snd.ttl,
-                        'address': rcv.src,
-                        'rtt': rcv.time - snd.time,
-                        'sport': snd[UDP].sport
-                }
-                log.debug("%s: %s" % (port, report))
-                self.report['hops_'+str(port)].append(report)
-        dl = []
-        max_ttl, timeout = self.max_ttl_and_timeout()
-        for port in self.dst_ports:
-            packets = IP(dst=self.localOptions['backend'],
-                    ttl=(1,max_ttl),id=RandShort())/UDP(dport=port,
-                            sport=self.get_sport('udp'))
-
-            d = self.sr(packets, timeout=timeout)
-            d.addCallback(finished, port)
-            dl.append(d)
-        return defer.DeferredList(dl)
-
-    def test_icmp_traceroute(self):
-        """
-        Does a traceroute to the destination by sending ICMP echo request
-        packets with TTLs from 1 until max_ttl.
-        """
-        def finished(packets):
-            log.debug("Finished running ICMP traceroute test")
-            answered, unanswered = packets
-            self.report['hops'] = []
-            for snd, rcv in answered:
-                report = {'ttl': snd.ttl,
-                        'address': rcv.src,
-                        'rtt': rcv.time - snd.time
-                }
-                log.debug("%s" % (report))
-                self.report['hops'].append(report)
-        dl = []
-        max_ttl, timeout = self.max_ttl_and_timeout()
-        packets = IP(dst=self.localOptions['backend'],
-                    ttl=(1,max_ttl), id=RandShort())/ICMP()
-
-        d = self.sr(packets, timeout=timeout)
-        d.addCallback(finished)
-        return d
-
diff --git a/nettests/scanning/__init__.py b/nettests/scanning/__init__.py
deleted file mode 100644
index e69de29..0000000
diff --git a/nettests/scanning/http_url_list.py b/nettests/scanning/http_url_list.py
deleted file mode 100644
index 0accaae..0000000
--- a/nettests/scanning/http_url_list.py
+++ /dev/null
@@ -1,98 +0,0 @@
-# -*- encoding: utf-8 -*-
-#
-# :authors: Arturo Filastò
-# :licence: see LICENSE
-
-from twisted.internet import defer
-from twisted.python import usage
-from ooni.templates import httpt
-from ooni.utils import log
-
-class UsageOptions(usage.Options):
-    optParameters = [['content', 'c', None,
-                        'The file to read from containing the content of a block page'],
-                     ['url', 'u', None, 'Specify a single URL to test.']
-                    ]
-
-class HTTPURLList(httpt.HTTPTest):
-    """
-    Performs GET, POST and PUT requests to a list of URLs specified as
-    input and checks if the page that we get back as a result matches that
-    of a block page given as input.
-
-    If no block page is given as input to the test it will simply collect the
-    responses to the HTTP requests and write them to a report file.
-    """
-    name = "HTTP URL List"
-    author = "Arturo Filastò"
-    version = "0.1.3"
-
-    usageOptions = UsageOptions
-
-    inputFile = ['file', 'f', None, 
-            'List of URLS to perform GET and POST requests to']
-
-    def setUp(self):
-        """
-        Check for inputs.
-        """
-        if self.input:
-            self.url = self.input
-        elif self.localOptions['url']:
-            self.url = self.localOptions['url']
-        else:
-            raise Exception("No input specified")
-
-    def check_for_content_censorship(self, body):
-        """
-        If we have specified what a censorship page looks like here we will
-        check if the page we are looking at matches it.
-
-        XXX this is not tested, though it is basically what was used to detect
-        censorship in the palestine case.
-        """
-        self.report['censored'] = True
-
-        censorship_page = open(self.localOptions['content']).xreadlines()
-        response_page = iter(body.split("\n"))
-
-    # We first align the two pages to the first HTML tag (something
-        # starting with <). This is useful so that we can give as input to this
-        # test something that comes from the output of curl -kis
-        # http://the_page/
-        for line in censorship_page:
-            if line.strip().startswith("<"):
-                break
-        for line in response_page:
-            if line.strip().startswith("<"):
-                break
-
-        for censorship_line in censorship_page:
-            try:
-                response_line = response_page.next()
-            except StopIteration:
-                # The censored page and the response we got do not match in
-                # length.
-                self.report['censored'] = False
-                break
-            censorship_line = censorship_line.replace("\n", "")
-            if response_line != censorship_line:
-                self.report['censored'] = False
-
-        censorship_page.close()
-
-    def processResponseBody(self, body):
-        if self.localOptions['content']:
-            log.msg("Checking for censorship in response body")
-            self.check_for_content_censorship(body)
-
-    def test_get(self):
-        return self.doRequest(self.url, method="GET")
-
-    def test_post(self):
-        return self.doRequest(self.url, method="POST")
-
-    def test_put(self):
-        return self.doRequest(self.url, method="PUT")
-
-
diff --git a/nettests/third_party/Makefile b/nettests/third_party/Makefile
deleted file mode 100644
index 16adfe0..0000000
--- a/nettests/third_party/Makefile
+++ /dev/null
@@ -1,3 +0,0 @@
-fetch:
-	wget http://netalyzr.icsi.berkeley.edu/NetalyzrCLI.jar
-	chmod +x NetalyzrCLI.jar
diff --git a/nettests/third_party/README b/nettests/third_party/README
deleted file mode 100644
index d9e435f..0000000
--- a/nettests/third_party/README
+++ /dev/null
@@ -1,14 +0,0 @@
-There is no license for NetalyzrCLI.jar; so while we include it, it's just
-for ease of use.
-
-We currently support interfacing with the ICSI Netalyzr system by wrapping
-the NetalyzrCLI.jar client. It was downloaded on August 5th, 2011 from the
-following URL:
-  http://netalyzr.icsi.berkeley.edu/NetalyzrCLI.jar
-
-More information about the client is available on the cli web page:
-  http://netalyzr.icsi.berkeley.edu/cli.html
-
-After looking at NetalyzrCLI.jar, I discovered that '-d' runs it in a
-debugging mode that is quite useful for understanding their testing
-framework as it runs.
diff --git a/nettests/third_party/__init__.py b/nettests/third_party/__init__.py
deleted file mode 100644
index e69de29..0000000
diff --git a/nettests/third_party/netalyzr.py b/nettests/third_party/netalyzr.py
deleted file mode 100644
index 9b21831..0000000
--- a/nettests/third_party/netalyzr.py
+++ /dev/null
@@ -1,58 +0,0 @@
-# -*- encoding: utf-8 -*-
-#
-# This is a wrapper around the Netalyzer Java command line client
-#
-# :authors: Jacob Appelbaum <jacob@xxxxxxxxxxxxx>
-#           Arturo "hellais" Filastò <art@xxxxxxxxx>
-# :licence: see LICENSE
-
-from ooni import nettest
-from ooni.utils import log
-import time
-import os
-from twisted.internet import reactor, threads, defer
-
-class NetalyzrWrapperTest(nettest.NetTestCase):
-    name = "NetalyzrWrapper"
-
-    def setUp(self):
-        cwd = os.path.abspath(os.path.join(os.path.abspath(__file__), '..'))
-
-        # XXX set the output directory to something more uniform
-        outputdir = os.path.join(cwd, '..', '..')
-
-        program_path = os.path.join(cwd, 'NetalyzrCLI.jar')
-        program = "java -jar %s -d" % program_path
-
-        test_token = time.asctime(time.gmtime()).replace(" ", "_").strip()
-
-        self.output_file = os.path.join(outputdir,
-                "NetalyzrCLI_" + test_token + ".out")
-        self.output_file.strip()
-        self.run_me = program + " 2>&1 >> " + self.output_file
-
-    def blocking_call(self):
-        try:
-            result = threads.blockingCallFromThread(reactor, os.system, self.run_me) 
-        except:
-            log.debug("Netalyzr had an error, please see the log file: %s" % self.output_file)
-        finally:
-            self.clean_up()
-
-    def clean_up(self):
-        self.report['netalyzr_report'] = self.output_file
-        log.debug("finished running NetalzrWrapper")
-        log.debug("Please check %s for Netalyzr output" % self.output_file)
-
-    def test_run_netalyzr(self):
-        """
-        This test simply wraps netalyzr and runs it from command line
-        """
-        log.msg("Running NetalyzrWrapper (this will take some time, be patient)")
-        log.debug("with command '%s'" % self.run_me)
-        # XXX we probably want to use a processprotocol here to obtain the
-        # stdout from Netalyzr. This would allows us to visualize progress
-        # (currently there is no progress because the stdout of os.system is
-        # trapped by twisted) and to include the link to the netalyzr report
-        # directly in the OONI report, perhaps even downloading it.
-        reactor.callInThread(self.blocking_call)



_______________________________________________
tor-commits mailing list
tor-commits@xxxxxxxxxxxxxxxxxxxx
https://lists.torproject.org/cgi-bin/mailman/listinfo/tor-commits