[Author Prev][Author Next][Thread Prev][Thread Next][Author Index][Thread Index]
[tor-commits] [ooni-probe/master] Refactor directory of nettests
commit fe417b1a021900c607d828cdb0bd06b39bb0ee34
Author: Arturo Filastò <art@xxxxxxxxx>
Date: Wed Nov 28 17:47:55 2012 +0100
Refactor directory of nettests
* Make a selection of tests we are interested in stabilizing
* Move tests that have to do with blocking to blocking
* Move tests that have to do with tampering to tampering
* Move tests that are not well tested to experimental
---
nettests/blocking/http_body_length.py | 90 +++
nettests/blocking/http_invalid_requests.py | 63 ++
nettests/blocking/tcpconnect.py | 51 ++
nettests/bridge_reachability/bridget.py | 462 --------------
nettests/bridge_reachability/echo.py | 132 ----
nettests/core/captiveportal.py | 644 --------------------
nettests/core/chinatrigger.py | 108 ----
nettests/core/daphn3.py | 119 ----
nettests/core/dnsspoof.py | 69 ---
nettests/core/http_body_length.py | 90 ---
nettests/core/http_header_field_manipulation.py | 181 ------
nettests/core/http_host.py | 109 ----
nettests/core/http_invalid_requests.py | 63 --
nettests/core/http_keyword_filtering.py | 45 --
nettests/core/http_uk_mobile_networks.py | 85 ---
nettests/core/http_url_list.py | 98 ---
nettests/core/keyword_filtering.py | 52 --
nettests/core/parasitictraceroute.py | 129 ----
nettests/core/squid.py | 117 ----
nettests/core/tcpconnect.py | 51 --
nettests/core/traceroute.py | 137 -----
.../experimental/bridge_reachability/bridget.py | 462 ++++++++++++++
nettests/experimental/bridge_reachability/echo.py | 132 ++++
nettests/experimental/chinatrigger.py | 108 ++++
nettests/experimental/daphn3.py | 119 ++++
nettests/experimental/http_keyword_filtering.py | 45 ++
nettests/experimental/http_uk_mobile_networks.py | 85 +++
nettests/experimental/keyword_filtering.py | 52 ++
nettests/experimental/parasitictraceroute.py | 129 ++++
nettests/experimental/squid.py | 117 ++++
nettests/manipulation/captiveportal.py | 644 ++++++++++++++++++++
nettests/manipulation/dnsspoof.py | 69 +++
.../manipulation/http_header_field_manipulation.py | 181 ++++++
nettests/manipulation/http_host.py | 109 ++++
nettests/manipulation/traceroute.py | 137 +++++
nettests/scanning/http_url_list.py | 98 +++
ooni/runner.py | 5 +-
37 files changed, 2695 insertions(+), 2692 deletions(-)
diff --git a/nettests/blocking/http_body_length.py b/nettests/blocking/http_body_length.py
new file mode 100644
index 0000000..7c5b0d2
--- /dev/null
+++ b/nettests/blocking/http_body_length.py
@@ -0,0 +1,90 @@
+# -*- encoding: utf-8 -*-
+#
+# :authors: Arturo Filastò
+# :licence: see LICENSE
+
+from twisted.internet import defer
+from twisted.python import usage
+from ooni.templates import httpt
+
+class UsageOptions(usage.Options):
+ optParameters = [
+ ['url', 'u', None, 'Specify a single URL to test.'],
+ ['factor', 'f', 0.8, 'What factor should be used for triggering censorship (0.8 == 80%)']
+ ]
+
+class HTTPBodyLength(httpt.HTTPTest):
+ """
+ Performs a two GET requests to the set of sites to be tested for
+ censorship, one over a known good control channel (Tor), the other over the
+ test network.
+ We then look at the response body lengths and see if the control response
+ differs from the experiment response by a certain factor.
+ """
+ name = "HTTP Body length test"
+ author = "Arturo Filastò"
+ version = "0.1"
+
+ usageOptions = UsageOptions
+
+ inputFile = ['file', 'f', None,
+ 'List of URLS to perform GET and POST requests to']
+
+ # These values are used for determining censorship based on response body
+ # lengths
+ control_body_length = None
+ experiment_body_length = None
+
+ def setUp(self):
+ """
+ Check for inputs.
+ """
+ if self.input:
+ self.url = self.input
+ elif self.localOptions['url']:
+ self.url = self.localOptions['url']
+ else:
+ raise Exception("No input specified")
+
+ self.factor = self.localOptions['factor']
+
+ def compare_body_lengths(self):
+ body_length_a = self.control_body_length
+ body_length_b = self.experiment_body_length
+
+ rel = float(body_length_a)/float(body_length_b)
+ if rel > 1:
+ rel = 1/rel
+
+ self.report['body_proportion'] = rel
+ self.report['factor'] = self.factor
+ if rel < self.factor:
+ self.report['censorship'] = True
+ else:
+ self.report['censorship'] = False
+
+ def test_get(self):
+ def errback(failure):
+ log.err("There was an error while testing %s" % self.url)
+ log.exception(failure)
+
+ def control_body(result):
+ self.control_body_length = len(result)
+ if self.experiment_body_length:
+ self.compare_body_lengths()
+
+ def experiment_body(result):
+ self.experiment_body_length = len(result)
+ if self.control_body_length:
+ self.compare_body_lengths()
+
+ dl = []
+ experiment_request = self.doRequest(self.url, method="GET",
+ body_processor=experiment_body)
+ control_request = self.doRequest(self.url, method="GET",
+ use_tor=True, body_processor=control_body)
+ dl.append(experiment_request)
+ dl.append(control_request)
+ d = defer.DeferredList(dl)
+ return d
+
diff --git a/nettests/blocking/http_invalid_requests.py b/nettests/blocking/http_invalid_requests.py
new file mode 100644
index 0000000..7e6f47f
--- /dev/null
+++ b/nettests/blocking/http_invalid_requests.py
@@ -0,0 +1,63 @@
+# -*- encoding: utf-8 -*-
+from twisted.python import usage
+
+from ooni.utils import randomStr
+from ooni.templates import tcpt
+
+class UsageOptions(usage.Options):
+ optParameters = [['backend', 'b', '127.0.0.1:57002',
+ 'The OONI backend that runs a TCP echo server (must be on port 80)']]
+
+ optFlags = [['nopayloadmatch', 'n',
+ "Don't match the payload of the response. This option is used when you don't have a TCP echo server running"]]
+
+class HTTPInvalidRequests(tcpt.TCPTest):
+ name = "HTTP Invalid Requests"
+ version = "0.1.1"
+ authors = "Arturo Filastò"
+
+ inputFile = ['file', 'f', None,
+ 'Input file of list of hostnames to attempt to resolve']
+
+ usageOptions = UsageOptions
+ requiredOptions = ['backend']
+
+ def setUp(self):
+ try:
+ self.address, self.port = self.localOptions['backend'].split(":")
+ self.port = int(self.port)
+ except:
+ raise usage.UsageError("Invalid backend address specified (must be address:port)")
+
+ def test_random_invalid_request(self):
+ """
+ We test sending data to a TCP echo server, if what we get back is not
+ what we have sent then there is tampering going on.
+ This is for example what squid will return when performing such
+ request:
+
+ HTTP/1.0 400 Bad Request
+ Server: squid/2.6.STABLE21
+ Date: Sat, 23 Jul 2011 02:22:44 GMT
+ Content-Type: text/html
+ Content-Length: 1178
+ Expires: Sat, 23 Jul 2011 02:22:44 GMT
+ X-Squid-Error: ERR_INVALID_REQ 0
+ X-Cache: MISS from cache_server
+ X-Cache-Lookup: NONE from cache_server:3128
+ Via: 1.0 cache_server:3128 (squid/2.6.STABLE21)
+ Proxy-Connection: close
+
+ """
+ payload = randomStr(10) + "\n\r"
+ def got_all_data(received_array):
+ if not self.localOptions['nopayloadmatch']:
+ first = received_array[0]
+ if first != payload:
+ self.report['tampering'] = True
+ else:
+ self.report['tampering'] = 'unknown'
+
+ d = self.sendPayload(payload)
+ d.addCallback(got_all_data)
+ return d
diff --git a/nettests/blocking/tcpconnect.py b/nettests/blocking/tcpconnect.py
new file mode 100644
index 0000000..d0a53f8
--- /dev/null
+++ b/nettests/blocking/tcpconnect.py
@@ -0,0 +1,51 @@
+# -*- encoding: utf-8 -*-
+from twisted.internet.protocol import Factory, Protocol
+from twisted.internet.endpoints import TCP4ClientEndpoint
+
+from twisted.internet.error import ConnectionRefusedError
+from twisted.internet.error import TCPTimedOutError
+
+from ooni import nettest
+from ooni.utils import log
+
+class TCPFactory(Factory):
+ def buildProtocol(self, addr):
+ return Protocol()
+
+class TCPConnectTest(nettest.NetTestCase):
+ name = "TCP Connect"
+ author = "Arturo Filastò"
+ version = "0.1"
+ inputFile = ['file', 'f', None,
+ 'File containing the IP:PORT combinations to be tested, one per line']
+
+ requiredOptions = ['file']
+ def test_connect(self):
+ """
+ This test performs a TCP connection to the remote host on the specified port.
+ the report will contains the string 'success' if the test has
+ succeeded, or the reason for the failure if it has failed.
+ """
+ host, port = self.input.split(":")
+ def connectionSuccess(protocol):
+ protocol.transport.loseConnection()
+ log.debug("Got a connection to %s" % self.input)
+ self.report["connection"] = 'success'
+
+ def connectionFailed(failure):
+ failure.trap(ConnectionRefusedError, TCPTimedOutError)
+ log.debug("Unable to connect to %s" % self.input)
+ if isinstance(failure, ConnectionRefusedError):
+ self.report["connection"] = 'refused'
+ elif isinstance(failure, TCPTimedOutError):
+ self.report["connection"] = 'timeout'
+ else:
+ self.report["connection"] = 'failed'
+
+ from twisted.internet import reactor
+ point = TCP4ClientEndpoint(reactor, host, int(port))
+ d = point.connect(TCPFactory())
+ d.addCallback(connectionSuccess)
+ d.addErrback(connectionFailed)
+ return d
+
diff --git a/nettests/bridge_reachability/bridget.py b/nettests/bridge_reachability/bridget.py
deleted file mode 100644
index acf3dff..0000000
--- a/nettests/bridge_reachability/bridget.py
+++ /dev/null
@@ -1,462 +0,0 @@
-#!/usr/bin/env python
-# -*- encoding: utf-8 -*-
-#
-# +-----------+
-# | BRIDGET |
-# | +--------------------------------------------+
-# +--------| Use a Tor process to test making a Tor |
-# | connection to a list of bridges or relays. |
-# +--------------------------------------------+
-#
-# :authors: Isis Lovecruft, Arturo Filasto
-# :licence: see included LICENSE
-# :version: 0.1.0-alpha
-
-from __future__ import with_statement
-from functools import partial
-from random import randint
-
-import os
-import sys
-
-from twisted.python import usage
-from twisted.internet import defer, error, reactor
-
-from ooni import nettest
-
-from ooni.utils import log, date
-from ooni.utils.config import ValueChecker
-
-from ooni.utils.onion import TxtorconImportError
-from ooni.utils.onion import PTNoBridgesException, PTNotFoundException
-
-
-try:
- from ooni.utils.onion import parse_data_dir
-except:
- log.msg("Please go to /ooni/lib and do 'make txtorcon' to run this test!")
-
-class MissingAssetException(Exception):
- pass
-
-class RandomPortException(Exception):
- """Raised when using a random port conflicts with configured ports."""
- def __init__(self):
- log.msg("Unable to use random and specific ports simultaneously")
- return sys.exit()
-
-class BridgetArgs(usage.Options):
- """Commandline options."""
- allowed = "Port to use for Tor's %s, must be between 1024 and 65535."
- sock_check = ValueChecker(allowed % "SocksPort").port_check
- ctrl_check = ValueChecker(allowed % "ControlPort").port_check
-
- optParameters = [
- ['bridges', 'b', None,
- 'File listing bridge IP:ORPorts to test'],
- ['relays', 'f', None,
- 'File listing relay IPs to test'],
- ['socks', 's', 9049, None, sock_check],
- ['control', 'c', 9052, None, ctrl_check],
- ['torpath', 'p', None,
- 'Path to the Tor binary to use'],
- ['datadir', 'd', None,
- 'Tor DataDirectory to use'],
- ['transport', 't', None,
- 'Tor ClientTransportPlugin'],
- ['resume', 'r', 0,
- 'Resume at this index']]
- optFlags = [['random', 'x', 'Use random ControlPort and SocksPort']]
-
- def postOptions(self):
- if not self['bridges'] and not self['relays']:
- raise MissingAssetException(
- "Bridget can't run without bridges or relays to test!")
- if self['transport']:
- ValueChecker.uid_check(
- "Can't run bridget as root with pluggable transports!")
- if not self['bridges']:
- raise PTNoBridgesException
- if self['socks'] or self['control']:
- if self['random']:
- raise RandomPortException
- if self['datadir']:
- ValueChecker.dir_check(self['datadir'])
- if self['torpath']:
- ValueChecker.file_check(self['torpath'])
-
-class BridgetTest(nettest.NetTestCase):
- """
- XXX fill me in
-
- :ivar config:
- An :class:`ooni.lib.txtorcon.TorConfig` instance.
- :ivar relays:
- A list of all provided relays to test.
- :ivar bridges:
- A list of all provided bridges to test.
- :ivar socks_port:
- Integer for Tor's SocksPort.
- :ivar control_port:
- Integer for Tor's ControlPort.
- :ivar transport:
- String defining the Tor's ClientTransportPlugin, for testing
- a bridge's pluggable transport functionality.
- :ivar tor_binary:
- Path to the Tor binary to use, e.g. \'/usr/sbin/tor\'
- """
- name = "bridget"
- author = "Isis Lovecruft <isis@xxxxxxxxxxxxxx>"
- version = "0.1"
- description = "Use a Tor process to test connecting to bridges or relays"
- usageOptions = BridgetArgs
-
- def setUp(self):
- """
- Extra initialization steps. We only want one child Tor process
- running, so we need to deal with most of the TorConfig() only once,
- before the experiment runs.
- """
- self.socks_port = 9049
- self.control_port = 9052
- self.circuit_timeout = 90
- self.tor_binary = '/usr/sbin/tor'
- self.data_directory = None
-
- def read_from_file(filename):
- log.msg("Loading information from %s ..." % opt)
- with open(filename) as fp:
- lst = []
- for line in fp.readlines():
- if line.startswith('#'):
- continue
- else:
- lst.append(line.replace('\n',''))
- return lst
-
- def __count_remaining__(which):
- total, reach, unreach = map(lambda x: which[x],
- ['all', 'reachable', 'unreachable'])
- count = len(total) - reach() - unreach()
- return count
-
- ## XXX should we do report['bridges_up'].append(self.bridges['current'])
- self.bridges = {}
- self.bridges['all'], self.bridges['up'], self.bridges['down'] = \
- ([] for i in range(3))
- self.bridges['reachable'] = lambda: len(self.bridges['up'])
- self.bridges['unreachable'] = lambda: len(self.bridges['down'])
- self.bridges['remaining'] = lambda: __count_remaining__(self.bridges)
- self.bridges['current'] = None
- self.bridges['pt_type'] = None
- self.bridges['use_pt'] = False
-
- self.relays = {}
- self.relays['all'], self.relays['up'], self.relays['down'] = \
- ([] for i in range(3))
- self.relays['reachable'] = lambda: len(self.relays['up'])
- self.relays['unreachable'] = lambda: len(self.relays['down'])
- self.relays['remaining'] = lambda: __count_remaining__(self.relays)
- self.relays['current'] = None
-
- if self.localOptions:
- try:
- from txtorcon import TorConfig
- except ImportError:
- raise TxtorconImportError
- else:
- self.config = TorConfig()
- finally:
- options = self.localOptions
-
- if options['bridges']:
- self.config.UseBridges = 1
- self.bridges['all'] = read_from_file(options['bridges'])
- if options['relays']:
- ## first hop must be in TorState().guards
- # XXX where is this defined?
- self.config.EntryNodes = ','.join(relay_list)
- self.relays['all'] = read_from_file(options['relays'])
- if options['socks']:
- self.socks_port = options['socks']
- if options['control']:
- self.control_port = options['control']
- if options['random']:
- log.msg("Using randomized ControlPort and SocksPort ...")
- self.socks_port = randint(1024, 2**16)
- self.control_port = randint(1024, 2**16)
- if options['torpath']:
- self.tor_binary = options['torpath']
- if options['datadir']:
- self.data_directory = parse_data_dir(options['datadir'])
- if options['transport']:
- ## ClientTransportPlugin transport exec pathtobinary [options]
- ## XXX we need a better way to deal with all PTs
- log.msg("Using ClientTransportPlugin %s" % options['transport'])
- self.bridges['use_pt'] = True
- [self.bridges['pt_type'], pt_exec] = \
- options['transport'].split(' ', 1)
-
- if self.bridges['pt_type'] == "obfs2":
- self.config.ClientTransportPlugin = \
- self.bridges['pt_type'] + " " + pt_exec
- else:
- raise PTNotFoundException
-
- self.config.SocksPort = self.socks_port
- self.config.ControlPort = self.control_port
- self.config.CookieAuthentication = 1
-
- def test_bridget(self):
- """
- if bridges:
- 1. configure first bridge line
- 2a. configure data_dir, if it doesn't exist
- 2b. write torrc to a tempfile in data_dir
- 3. start tor } if any of these
- 4. remove bridges which are public relays } fail, add current
- 5. SIGHUP for each bridge } bridge to unreach-
- } able bridges.
- if relays:
- 1a. configure the data_dir, if it doesn't exist
- 1b. write torrc to a tempfile in data_dir
- 2. start tor
- 3. remove any of our relays which are already part of current
- circuits
- 4a. attach CustomCircuit() to self.state
- 4b. RELAY_EXTEND for each relay } if this fails, add
- } current relay to list
- } of unreachable relays
- 5.
- if bridges and relays:
- 1. configure first bridge line
- 2a. configure data_dir if it doesn't exist
- 2b. write torrc to a tempfile in data_dir
- 3. start tor
- 4. remove bridges which are public relays
- 5. remove any of our relays which are already part of current
- circuits
- 6a. attach CustomCircuit() to self.state
- 6b. for each bridge, build three circuits, with three
- relays each
- 6c. RELAY_EXTEND for each relay } if this fails, add
- } current relay to list
- } of unreachable relays
-
- :param args:
- The :class:`BridgetAsset` line currently being used. Except that it
- in Bridget it doesn't, so it should be ignored and avoided.
- """
- try:
- from ooni.utils import process
- from ooni.utils.onion import remove_public_relays, start_tor
- from ooni.utils.onion import start_tor_filter_nodes
- from ooni.utils.onion import setup_fail, setup_done
- from ooni.utils.onion import CustomCircuit
- from ooni.utils.timer import deferred_timeout, TimeoutError
- from ooni.lib.txtorcon import TorConfig, TorState
- except ImportError:
- raise TxtorconImportError
- except TxtorconImportError, tie:
- log.err(tie)
- sys.exit()
-
- def reconfigure_done(state, bridges):
- """
- Append :ivar:`bridges['current']` to the list
- :ivar:`bridges['up'].
- """
- log.msg("Reconfiguring with 'Bridge %s' successful"
- % bridges['current'])
- bridges['up'].append(bridges['current'])
- return state
-
- def reconfigure_fail(state, bridges):
- """
- Append :ivar:`bridges['current']` to the list
- :ivar:`bridges['down'].
- """
- log.msg("Reconfiguring TorConfig with parameters %s failed"
- % state)
- bridges['down'].append(bridges['current'])
- return state
-
- @defer.inlineCallbacks
- def reconfigure_bridge(state, bridges):
- """
- Rewrite the Bridge line in our torrc. If use of pluggable
- transports was specified, rewrite the line as:
- Bridge <transport_type> <IP>:<ORPort>
- Otherwise, rewrite in the standard form:
- Bridge <IP>:<ORPort>
-
- :param state:
- A fully bootstrapped instance of
- :class:`ooni.lib.txtorcon.TorState`.
- :param bridges:
- A dictionary of bridges containing the following keys:
-
- bridges['remaining'] :: A function returning and int for the
- number of remaining bridges to test.
- bridges['current'] :: A string containing the <IP>:<ORPort>
- of the current bridge.
- bridges['use_pt'] :: A boolean, True if we're testing
- bridges with a pluggable transport;
- False otherwise.
- bridges['pt_type'] :: If :ivar:`bridges['use_pt'] is True,
- this is a string containing the type
- of pluggable transport to test.
- :return:
- :param:`state`
- """
- log.msg("Current Bridge: %s" % bridges['current'])
- log.msg("We now have %d bridges remaining to test..."
- % bridges['remaining']())
- try:
- if bridges['use_pt'] is False:
- controller_response = yield state.protocol.set_conf(
- 'Bridge', bridges['current'])
- elif bridges['use_pt'] and bridges['pt_type'] is not None:
- controller_reponse = yield state.protocol.set_conf(
- 'Bridge', bridges['pt_type'] +' '+ bridges['current'])
- else:
- raise PTNotFoundException
-
- if controller_response == 'OK':
- finish = yield reconfigure_done(state, bridges)
- else:
- log.err("SETCONF for %s responded with error:\n %s"
- % (bridges['current'], controller_response))
- finish = yield reconfigure_fail(state, bridges)
-
- defer.returnValue(finish)
-
- except Exception, e:
- log.err("Reconfiguring torrc with Bridge line %s failed:\n%s"
- % (bridges['current'], e))
- defer.returnValue(None)
-
- def attacher_extend_circuit(attacher, deferred, router):
- ## XXX todo write me
- ## state.attacher.extend_circuit
- raise NotImplemented
- #attacher.extend_circuit
-
- def state_attach(state, path):
- log.msg("Setting up custom circuit builder...")
- attacher = CustomCircuit(state)
- state.set_attacher(attacher, reactor)
- state.add_circuit_listener(attacher)
- return state
-
- ## OLD
- #for circ in state.circuits.values():
- # for relay in circ.path:
- # try:
- # relay_list.remove(relay)
- # except KeyError:
- # continue
- ## XXX how do we attach to circuits with bridges?
- d = defer.Deferred()
- attacher.request_circuit_build(d)
- return d
-
- def state_attach_fail(state):
- log.err("Attaching custom circuit builder failed: %s" % state)
-
- log.msg("Bridget: initiating test ... ") ## Start the experiment
-
- ## if we've at least one bridge, and our config has no 'Bridge' line
- if self.bridges['remaining']() >= 1 \
- and not 'Bridge' in self.config.config:
-
- ## configure our first bridge line
- self.bridges['current'] = self.bridges['all'][0]
- self.config.Bridge = self.bridges['current']
- ## avoid starting several
- self.config.save() ## processes
- assert self.config.config.has_key('Bridge'), "No Bridge Line"
-
- ## start tor and remove bridges which are public relays
- from ooni.utils.onion import start_tor_filter_nodes
- state = start_tor_filter_nodes(reactor, self.config,
- self.control_port, self.tor_binary,
- self.data_directory, self.bridges)
- #controller = defer.Deferred()
- #controller.addCallback(singleton_semaphore, tor)
- #controller.addErrback(setup_fail)
- #bootstrap = defer.gatherResults([controller, filter_bridges],
- # consumeErrors=True)
-
- if state is not None:
- log.debug("state:\n%s" % state)
- log.debug("Current callbacks on TorState():\n%s"
- % state.callbacks)
-
- ## if we've got more bridges
- if self.bridges['remaining']() >= 2:
- #all = []
- for bridge in self.bridges['all'][1:]:
- self.bridges['current'] = bridge
- #new = defer.Deferred()
- #new.addCallback(reconfigure_bridge, state, self.bridges)
- #all.append(new)
- #check_remaining = defer.DeferredList(all, consumeErrors=True)
- #state.chainDeferred(check_remaining)
- state.addCallback(reconfigure_bridge, self.bridges)
-
- if self.relays['remaining']() > 0:
- while self.relays['remaining']() >= 3:
- #path = list(self.relays.pop() for i in range(3))
- #log.msg("Trying path %s" % '->'.join(map(lambda node:
- # node, path)))
- self.relays['current'] = self.relays['all'].pop()
- for circ in state.circuits.values():
- for node in circ.path:
- if node == self.relays['current']:
- self.relays['up'].append(self.relays['current'])
- if len(circ.path) < 3:
- try:
- ext = attacher_extend_circuit(state.attacher, circ,
- self.relays['current'])
- ext.addCallback(attacher_extend_circuit_done,
- state.attacher, circ,
- self.relays['current'])
- except Exception, e:
- log.err("Extend circuit failed: %s" % e)
- else:
- continue
-
- #state.callback(all)
- #self.reactor.run()
- return state
-
- def disabled_startTest(self, args):
- """
- Local override of :meth:`OONITest.startTest` to bypass calling
- self.control.
-
- :param args:
- The current line of :class:`Asset`, not used but kept for
- compatibility reasons.
- :return:
- A fired deferred which callbacks :meth:`experiment` and
- :meth:`OONITest.finished`.
- """
- self.start_time = date.now()
- self.d = self.experiment(args)
- self.d.addErrback(log.err)
- self.d.addCallbacks(self.finished, log.err)
- return self.d
-
-## ISIS' NOTES
-## -----------
-## TODO:
-## x cleanup documentation
-## x add DataDirectory option
-## x check if bridges are public relays
-## o take bridge_desc file as input, also be able to give same
-## format as output
-## x Add asynchronous timeout for deferred, so that we don't wait
-## o Add assychronous timout for deferred, so that we don't wait
-## forever for bridges that don't work.
diff --git a/nettests/bridge_reachability/echo.py b/nettests/bridge_reachability/echo.py
deleted file mode 100644
index d4033dd..0000000
--- a/nettests/bridge_reachability/echo.py
+++ /dev/null
@@ -1,132 +0,0 @@
-#!/usr/bin/env python
-# -*- coding: utf-8 -*-
-#
-# +---------+
-# | echo.py |
-# +---------+
-# A simple ICMP-8 ping test.
-#
-# @authors: Isis Lovecruft, <isis@xxxxxxxxxxxxxx>
-# @version: 0.0.2-pre-alpha
-# @license: copyright (c) 2012 Isis Lovecruft
-# see attached LICENCE file
-#
-
-import os
-import sys
-
-from twisted.python import usage
-from twisted.internet import reactor, defer
-from ooni import nettest
-from ooni.utils import log, net, Storage, txscapy
-
-try:
- from scapy.all import IP, ICMP
- from scapy.all import sr1
- from ooni.lib import txscapy
- from ooni.lib.txscapy import txsr, txsend
- from ooni.templates.scapyt import BaseScapyTest
-except:
- log.msg("This test requires scapy, see www.secdev.org/projects/scapy")
-
-class UsageOptions(usage.Options):
- optParameters = [
- ['dst', 'd', None, 'Host IP to ping'],
- ['file', 'f', None, 'File of list of IPs to ping'],
- ['interface', 'i', None, 'Network interface to use'],
- ['count', 'c', 1, 'Number of packets to send', int],
- ['size', 's', 56, 'Number of bytes to send in ICMP data field', int],
- ['ttl', 'l', 25, 'Set the IP Time to Live', int],
- ['timeout', 't', 2, 'Seconds until timeout if no response', int],
- ['pcap', 'p', None, 'Save pcap to this file'],
- ['receive', 'r', True, 'Receive response packets']]
-
-class EchoTest(nettest.NetTestCase):
- """
- xxx fill me in
- """
- name = 'echo'
- author = 'Isis Lovecruft <isis@xxxxxxxxxxxxxx>'
- description = 'A simple ping test to see if a host is reachable.'
- version = '0.0.2'
- requiresRoot = True
-
- usageOptions = UsageOptions
- #requiredOptions = ['dst']
-
- def setUp(self, *a, **kw):
- self.destinations = {}
-
- if self.localOptions:
- for key, value in self.localOptions.items():
- log.debug("setting self.%s = %s" % (key, value))
- setattr(self, key, value)
-
- self.timeout *= 1000 ## convert to milliseconds
-
- if not self.interface:
- try:
- iface = txscapy.getDefaultIface()
- except Exception, e:
- log.msg("No network interface specified!")
- log.err(e)
- else:
- log.msg("Using system default interface: %s" % iface)
- self.interface = iface
-
- if self.pcap:
- try:
- self.pcapfile = open(self.pcap, 'a+')
- except:
- log.msg("Unable to write to pcap file %s" % self.pcap)
- else:
- self.pcap = net.capturePacket(self.pcapfile)
-
- if not self.dst:
- if self.file:
- self.dstProcessor(self.file)
- for key, value in self.destinations.items():
- for label, data in value.items():
- if not 'ans' in data:
- self.dst = label
- else:
- self.addDest(self.dst)
- log.debug("self.dst is now: %s" % self.dst)
-
- log.debug("Initialization of %s test completed." % self.name)
-
- def addDest(self, dest):
- d = dest.strip()
- self.destinations[d] = {'dst_ip': d}
-
- def dstProcessor(self, inputfile):
- from ipaddr import IPAddress
-
- if os.path.isfile(inputfile):
- with open(inputfile) as f:
- for line in f.readlines():
- if line.startswith('#'):
- continue
- self.addDest(line)
-
- def test_icmp(self):
- def process_response(echo_reply, dest):
- ans, unans = echo_reply
- if ans:
- log.msg("Recieved echo reply from %s: %s" % (dest, ans))
- else:
- log.msg("No reply was received from %s. Possible censorship event." % dest)
- log.debug("Unanswered packets: %s" % unans)
- self.report[dest] = echo_reply
-
- for label, data in self.destinations.items():
- reply = sr1(IP(dst=lebal)/ICMP())
- process = process_reponse(reply, label)
-
- #(ans, unans) = ping
- #self.destinations[self.dst].update({'ans': ans,
- # 'unans': unans,
- # 'response_packet': ping})
- #return ping
-
- #return reply
diff --git a/nettests/core/captiveportal.py b/nettests/core/captiveportal.py
deleted file mode 100644
index be8da27..0000000
--- a/nettests/core/captiveportal.py
+++ /dev/null
@@ -1,644 +0,0 @@
-# -*- coding: utf-8 -*-
-"""
- captiveportal
- *************
-
- This test is a collection of tests to detect the presence of a
- captive portal. Code is taken, in part, from the old ooni-probe,
- which was written by Jacob Appelbaum and Arturo Filastò.
-
- This module performs multiple tests that match specific vendor captive
- portal tests. This is a basic internet captive portal filter tester written
- for RECon 2011.
-
- Read the following URLs to understand the captive portal detection process
- for various vendors:
-
- http://technet.microsoft.com/en-us/library/cc766017%28WS.10%29.aspx
- http://blog.superuser.com/2011/05/16/windows-7-network-awareness/
- http://isc.sans.org/diary.html?storyid=10312&
- http://src.chromium.org/viewvc/chrome?view=rev&revision=74608
- http://code.google.com/p/chromium-os/issues/detail?3281ttp,
- http://crbug.com/52489
- http://crbug.com/71736
- https://bugzilla.mozilla.org/show_bug.cgi?id=562917
- https://bugzilla.mozilla.org/show_bug.cgi?id=603505
- http://lists.w3.org/Archives/Public/ietf-http-wg/2011JanMar/0086.html
- http://tools.ietf.org/html/draft-nottingham-http-portal-02
-
- :copyright: (c) 2012 Isis Lovecruft
- :license: see LICENSE for more details
-"""
-import base64
-import os
-import random
-import re
-import string
-import urllib2
-from urlparse import urlparse
-
-from twisted.python import usage
-from twisted.internet import defer, threads
-
-from ooni import nettest
-from ooni.templates import httpt
-from ooni.utils import net
-from ooni.utils import log
-
-try:
- from dns import resolver
-except ImportError:
- print "The dnspython module was not found:"
- print "See https://crate.io/packages/dnspython/"
- resolver = None
-
-__plugoo__ = "captiveportal"
-__desc__ = "Captive portal detection test"
-
-class UsageOptions(usage.Options):
- optParameters = [['asset', 'a', None, 'Asset file'],
- ['experiment-url', 'e', 'http://google.com/', 'Experiment URL'],
- ['user-agent', 'u', random.choice(net.userAgents),
- 'User agent for HTTP requests']
- ]
-
-class CaptivePortal(nettest.NetTestCase):
- """
- Compares content and status codes of HTTP responses, and attempts
- to determine if content has been altered.
- """
-
- name = "captivep"
- description = "Captive Portal Test"
- version = '0.2'
- author = "Isis Lovecruft"
- usageOptions = UsageOptions
-
- def http_fetch(self, url, headers={}):
- """
- Parses an HTTP url, fetches it, and returns a urllib2 response
- object.
- """
- url = urlparse(url).geturl()
- request = urllib2.Request(url, None, headers)
- response = urllib2.urlopen(request)
- response_headers = dict(response.headers)
- return response, response_headers
-
- def http_content_match_fuzzy_opt(self, experimental_url, control_result,
- headers=None, fuzzy=False):
- """
- Makes an HTTP request on port 80 for experimental_url, then
- compares the response_content of experimental_url with the
- control_result. Optionally, if the fuzzy parameter is set to
- True, the response_content is compared with a regex of the
- control_result. If the response_content from the
- experimental_url and the control_result match, returns True
- with the HTTP status code and headers; False, status code, and
- headers if otherwise.
- """
-
- if headers is None:
- default_ua = self.local_options['user-agent']
- headers = {'User-Agent': default_ua}
-
- response, response_headers = self.http_fetch(experimental_url, headers)
- response_content = response.read()
- response_code = response.code
- if response_content is None:
- log.warn("HTTP connection appears to have failed.")
- return False, False, False
-
- if fuzzy:
- pattern = re.compile(control_result)
- match = pattern.search(response_content)
- log.msg("Fuzzy HTTP content comparison for experiment URL")
- log.msg("'%s'" % experimental_url)
- if not match:
- log.msg("does not match!")
- return False, response_code, response_headers
- else:
- log.msg("and the expected control result yielded a match.")
- return True, response_code, response_headers
- else:
- if str(response_content) != str(control_result):
- log.msg("HTTP content comparison of experiment URL")
- log.msg("'%s'" % experimental_url)
- log.msg("and the expected control result do not match.")
- return False, response_code, response_headers
- else:
- return True, response_code, response_headers
-
- def http_status_code_match(self, experiment_code, control_code):
- """
- Compare two HTTP status codes, returns True if they match.
- """
- return int(experiment_code) == int(control_code)
-
- def http_status_code_no_match(self, experiment_code, control_code):
- """
- Compare two HTTP status codes, returns True if they do not match.
- """
- return int(experiment_code) != int(control_code)
-
- def dns_resolve(self, hostname, nameserver=None):
- """
- Resolves hostname(s) though nameserver to corresponding
- address(es). hostname may be either a single hostname string,
- or a list of strings. If nameserver is not given, use local
- DNS resolver, and if that fails try using 8.8.8.8.
- """
- if not resolver:
- log.msg("dnspython is not installed.\
- Cannot perform DNS Resolve test")
- return []
- if isinstance(hostname, str):
- hostname = [hostname]
-
- if nameserver is not None:
- res = resolver.Resolver(configure=False)
- res.nameservers = [nameserver]
- else:
- res = resolver.Resolver()
-
- response = []
- answer = None
-
- for hn in hostname:
- try:
- answer = res.query(hn)
- except resolver.NoNameservers:
- res.nameservers = ['8.8.8.8']
- try:
- answer = res.query(hn)
- except resolver.NXDOMAIN:
- log.msg("DNS resolution for %s returned NXDOMAIN" % hn)
- response.append('NXDOMAIN')
- except resolver.NXDOMAIN:
- log.msg("DNS resolution for %s returned NXDOMAIN" % hn)
- response.append('NXDOMAIN')
- finally:
- if not answer:
- return response
- for addr in answer:
- response.append(addr.address)
- return response
-
- def dns_resolve_match(self, experiment_hostname, control_address):
- """
- Resolve experiment_hostname, and check to see that it returns
- an experiment_address which matches the control_address. If
- they match, returns True and experiment_address; otherwise
- returns False and experiment_address.
- """
- experiment_address = self.dns_resolve(experiment_hostname)
- if not experiment_address:
- log.debug("dns_resolve() for %s failed" % experiment_hostname)
- return None, experiment_address
-
- if len(set(experiment_address) & set([control_address])) > 0:
- return True, experiment_address
- else:
- log.msg("DNS comparison of control '%s' does not" % control_address)
- log.msg("match experiment response '%s'" % experiment_address)
- return False, experiment_address
-
- def get_auth_nameservers(self, hostname):
- """
- Many CPs set a nameserver to be used. Let's query that
- nameserver for the authoritative nameservers of hostname.
-
- The equivalent of:
- $ dig +short NS ooni.nu
- """
- if not resolver:
- log.msg("dnspython not installed.")
- log.msg("Cannot perform test.")
- return []
-
- res = resolver.Resolver()
- answer = res.query(hostname, 'NS')
- auth_nameservers = []
- for auth in answer:
- auth_nameservers.append(auth.to_text())
- return auth_nameservers
-
- def hostname_to_0x20(self, hostname):
- """
- MaKEs yOur HOsTnaME lOoK LiKE THis.
-
- For more information, see:
- D. Dagon, et. al. "Increased DNS Forgery Resistance
- Through 0x20-Bit Encoding". Proc. CSS, 2008.
- """
- hostname_0x20 = ''
- for char in hostname:
- l33t = random.choice(['caps', 'nocaps'])
- if l33t == 'caps':
- hostname_0x20 += char.capitalize()
- else:
- hostname_0x20 += char.lower()
- return hostname_0x20
-
- def check_0x20_to_auth_ns(self, hostname, sample_size=None):
- """
- Resolve a 0x20 DNS request for hostname over hostname's
- authoritative nameserver(s), and check to make sure that
- the capitalization in the 0x20 request matches that of the
- response. Also, check the serial numbers of the SOA (Start
- of Authority) records on the authoritative nameservers to
- make sure that they match.
-
- If sample_size is given, a random sample equal to that number
- of authoritative nameservers will be queried; default is 5.
- """
- log.msg("")
- log.msg("Testing random capitalization of DNS queries...")
- log.msg("Testing that Start of Authority serial numbers match...")
-
- auth_nameservers = self.get_auth_nameservers(hostname)
-
- if sample_size is None:
- sample_size = 5
- resolved_auth_ns = random.sample(self.dns_resolve(auth_nameservers),
- sample_size)
-
- querynames = []
- answernames = []
- serials = []
-
- # Even when gevent monkey patching is on, the requests here
- # are sent without being 0x20'd, so we need to 0x20 them.
- hostname = self.hostname_to_0x20(hostname)
-
- for auth_ns in resolved_auth_ns:
- res = resolver.Resolver(configure=False)
- res.nameservers = [auth_ns]
- try:
- answer = res.query(hostname, 'SOA')
- except resolver.Timeout:
- continue
- querynames.append(answer.qname.to_text())
- answernames.append(answer.rrset.name.to_text())
- for soa in answer:
- serials.append(str(soa.serial))
-
- if len(set(querynames).intersection(answernames)) == 1:
- log.msg("Capitalization in DNS queries and responses match.")
- name_match = True
- else:
- log.msg("The random capitalization '%s' used in" % hostname)
- log.msg("DNS queries to that hostname's authoritative")
- log.msg("nameservers does not match the capitalization in")
- log.msg("the response.")
- name_match = False
-
- if len(set(serials)) == 1:
- log.msg("Start of Authority serial numbers all match.")
- serial_match = True
- else:
- log.msg("Some SOA serial numbers did not match the rest!")
- serial_match = False
-
- ret = name_match, serial_match, querynames, answernames, serials
-
- if name_match and serial_match:
- log.msg("Your DNS queries do not appear to be tampered.")
- return ret
- elif name_match or serial_match:
- log.msg("Something is tampering with your DNS queries.")
- return ret
- elif not name_match and not serial_match:
- log.msg("Your DNS queries are definitely being tampered with.")
- return ret
-
- def get_random_url_safe_string(self, length):
- """
- Returns a random url-safe string of specified length, where
- 0 < length <= 256. The returned string will always start with
- an alphabetic character.
- """
- if (length <= 0):
- length = 1
- elif (length > 256):
- length = 256
-
- random_ascii = base64.urlsafe_b64encode(os.urandom(int(length)))
-
- while not random_ascii[:1].isalpha():
- random_ascii = base64.urlsafe_b64encode(os.urandom(int(length)))
-
- three_quarters = int((len(random_ascii)) * (3.0/4.0))
- random_string = random_ascii[:three_quarters]
- return random_string
-
- def get_random_hostname(self, length=None):
- """
- Returns a random hostname with SLD of specified length. If
- length is unspecified, length=32 is used.
-
- These *should* all resolve to NXDOMAIN. If they actually
- resolve to a box that isn't part of a captive portal that
- would be rather interesting.
- """
- if length is None:
- length = 32
-
- random_sld = self.get_random_url_safe_string(length)
-
- # if it doesn't start with a letter, chuck it.
- while not random_sld[:1].isalpha():
- random_sld = self.get_random_url_safe_string(length)
-
- tld_list = ['.com', '.net', '.org', '.info', '.test', '.invalid']
- random_tld = urllib2.random.choice(tld_list)
- random_hostname = random_sld + random_tld
- return random_hostname
-
- def compare_random_hostnames(self, hostname_count=None, hostname_length=None):
- """
- Get hostname_count number of random hostnames with SLD length
- of hostname_length, and then attempt DNS resolution. If no
- arguments are given, default to three hostnames of 32 bytes
- each. These random hostnames *should* resolve to NXDOMAIN,
- except in the case where a user is presented with a captive
- portal and remains unauthenticated, in which case the captive
- portal may return the address of the authentication page.
-
- If the cardinality of the intersection of the set of resolved
- random hostnames and the single element control set
- (['NXDOMAIN']) are equal to one, then DNS properly resolved.
-
- Returns true if only NXDOMAINs were returned, otherwise returns
- False with the relative complement of the control set in the
- response set.
- """
- if hostname_count is None:
- hostname_count = 3
-
- log.msg("Generating random hostnames...")
- log.msg("Resolving DNS for %d random hostnames..." % hostname_count)
-
- control = ['NXDOMAIN']
- responses = []
-
- for x in range(hostname_count):
- random_hostname = self.get_random_hostname(hostname_length)
- response_match, response_address = self.dns_resolve_match(random_hostname,
- control[0])
- for address in response_address:
- if response_match is False:
- log.msg("Strangely, DNS resolution of the random hostname")
- log.msg("%s actually points to %s"
- % (random_hostname, response_address))
- responses = responses + [address]
- else:
- responses = responses + [address]
-
- intersection = set(responses) & set(control)
- relative_complement = set(responses) - set(control)
- r = set(responses)
-
- if len(intersection) == 1:
- log.msg("All %d random hostnames properly resolved to NXDOMAIN."
- % hostname_count)
- return True, relative_complement
- elif (len(intersection) == 1) and (len(r) > 1):
- log.msg("Something odd happened. Some random hostnames correctly")
- log.msg("resolved to NXDOMAIN, but several others resolved to")
- log.msg("to the following addresses: %s" % relative_complement)
- return False, relative_complement
- elif (len(intersection) == 0) and (len(r) == 1):
- log.msg("All random hostnames resolved to the IP address ")
- log.msg("'%s', which is indicative of a captive portal." % r)
- return False, relative_complement
- else:
- log.debug("Apparently, pigs are flying on your network, 'cause a")
- log.debug("bunch of hostnames made from 32-byte random strings")
- log.debug("just magically resolved to a bunch of random addresses.")
- log.debug("That is definitely highly improbable. In fact, my napkin")
- log.debug("tells me that the probability of just one of those")
- log.debug("hostnames resolving to an address is 1.68e-59, making")
- log.debug("it nearly twice as unlikely as an MD5 hash collision.")
- log.debug("Either someone is seriously messing with your network,")
- log.debug("or else you are witnessing the impossible. %s" % r)
- return False, relative_complement
-
- def google_dns_cp_test(self):
- """
- Google Chrome resolves three 10-byte random hostnames.
- """
- subtest = "Google Chrome DNS-based"
- log.msg("Running the Google Chrome DNS-based captive portal test...")
-
- gmatch, google_dns_result = self.compare_random_hostnames(3, 10)
-
- if gmatch:
- log.msg("Google Chrome DNS-based captive portal test did not")
- log.msg("detect a captive portal.")
- return google_dns_result
- else:
- log.msg("Google Chrome DNS-based captive portal test believes")
- log.msg("you are in a captive portal, or else something very")
- log.msg("odd is happening with your DNS.")
- return google_dns_result
-
- def ms_dns_cp_test(self):
- """
- Microsoft "phones home" to a server which will always resolve
- to the same address.
- """
- subtest = "Microsoft NCSI DNS-based"
-
- log.msg("")
- log.msg("Running the Microsoft NCSI DNS-based captive portal")
- log.msg("test...")
-
- msmatch, ms_dns_result = self.dns_resolve_match("dns.msftncsi.com",
- "131.107.255.255")
- if msmatch:
- log.msg("Microsoft NCSI DNS-based captive portal test did not")
- log.msg("detect a captive portal.")
- return ms_dns_result
- else:
- log.msg("Microsoft NCSI DNS-based captive portal test ")
- log.msg("believes you are in a captive portal.")
- return ms_dns_result
-
- def run_vendor_dns_tests(self):
- """
- Run the vendor DNS tests.
- """
- report = {}
- report['google_dns_cp'] = self.google_dns_cp_test()
- report['ms_dns_cp'] = self.ms_dns_cp_test()
-
- return report
-
- def run_vendor_tests(self, *a, **kw):
- """
- These are several vendor tests used to detect the presence of
- a captive portal. Each test compares HTTP status code and
- content to the control results and has its own User-Agent
- string, in order to emulate the test as it would occur on the
- device it was intended for. Vendor tests are defined in the
- format:
- [exp_url, ctrl_result, ctrl_code, ua, test_name]
- """
-
- vendor_tests = [['http://www.apple.com/library/test/success.html',
- 'Success',
- '200',
- 'Mozilla/5.0 (iPhone; U; CPU like Mac OS X; en) AppleWebKit/420+ (KHTML, like Gecko) Version/3.0 Mobile/1A543a Safari/419.3',
- 'Apple HTTP Captive Portal'],
- ['http://tools.ietf.org/html/draft-nottingham-http-portal-02',
- '428 Network Authentication Required',
- '428',
- 'Mozilla/5.0 (Windows NT 6.1; rv:5.0) Gecko/20100101 Firefox/5.0',
- 'W3 Captive Portal'],
- ['http://www.msftncsi.com/ncsi.txt',
- 'Microsoft NCSI',
- '200',
- 'Microsoft NCSI',
- 'MS HTTP Captive Portal',]]
-
- cm = self.http_content_match_fuzzy_opt
- sm = self.http_status_code_match
- snm = self.http_status_code_no_match
-
- def compare_content(status_func, fuzzy, experiment_url, control_result,
- control_code, headers, test_name):
- log.msg("")
- log.msg("Running the %s test..." % test_name)
-
- content_match, experiment_code, experiment_headers = cm(experiment_url,
- control_result,
- headers, fuzzy)
- status_match = status_func(experiment_code, control_code)
-
- if status_match and content_match:
- log.msg("The %s test was unable to detect" % test_name)
- log.msg("a captive portal.")
- return True
- else:
- log.msg("The %s test shows that your network" % test_name)
- log.msg("is filtered.")
- return False
-
- result = []
- for vt in vendor_tests:
- report = {}
- report['vt'] = vt
-
- experiment_url = vt[0]
- control_result = vt[1]
- control_code = vt[2]
- headers = {'User-Agent': vt[3]}
- test_name = vt[4]
-
- args = (experiment_url, control_result, control_code, headers, test_name)
-
- if test_name == "MS HTTP Captive Portal":
- report['result'] = compare_content(sm, False, *args)
-
- elif test_name == "Apple HTTP Captive Portal":
- report['result'] = compare_content(sm, True, *args)
-
- elif test_name == "W3 Captive Portal":
- report['result'] = compare_content(snm, True, *args)
-
- else:
- log.warn("Ooni is trying to run an undefined CP vendor test.")
- result.append(report)
- return result
-
- def control(self, experiment_result, args):
- """
- Compares the content and status code of the HTTP response for
- experiment_url with the control_result and control_code
- respectively. If the status codes match, but the experimental
- content and control_result do not match, fuzzy matching is enabled
- to determine if the control_result is at least included somewhere
- in the experimental content. Returns True if matches are found,
- and False if otherwise.
- """
- # XXX put this back to being parametrized
- #experiment_url = self.local_options['experiment-url']
- experiment_url = 'http://google.com/'
- control_result = 'XX'
- control_code = 200
- ua = self.local_options['user-agent']
-
- cm = self.http_content_match_fuzzy_opt
- sm = self.http_status_code_match
- snm = self.http_status_code_no_match
-
- log.msg("Running test for '%s'..." % experiment_url)
- content_match, experiment_code, experiment_headers = cm(experiment_url,
- control_result)
- status_match = sm(experiment_code, control_code)
- if status_match and content_match:
- log.msg("The test for '%s'" % experiment_url)
- log.msg("was unable to detect a captive portal.")
-
- self.report['result'] = True
-
- elif status_match and not content_match:
- log.msg("Retrying '%s' with fuzzy match enabled."
- % experiment_url)
- fuzzy_match, experiment_code, experiment_headers = cm(experiment_url,
- control_result,
- fuzzy=True)
- if fuzzy_match:
- self.report['result'] = True
- else:
- log.msg("Found modified content on '%s'," % experiment_url)
- log.msg("which could indicate a captive portal.")
-
- self.report['result'] = False
- else:
- log.msg("The content comparison test for ")
- log.msg("'%s'" % experiment_url)
- log.msg("shows that your HTTP traffic is filtered.")
-
- self.report['result'] = False
-
- @defer.inlineCallbacks
- def test_captive_portal(self):
- """
- Runs the CaptivePortal(Test).
-
- CONFIG OPTIONS
- --------------
-
- If "do_captive_portal_vendor_tests" is set to "true", then vendor
- specific captive portal HTTP-based tests will be run.
-
- If "do_captive_portal_dns_tests" is set to "true", then vendor
- specific captive portal DNS-based tests will be run.
-
- If "check_dns_requests" is set to "true", then Ooni-probe will
- attempt to check that your DNS requests are not being tampered with
- by a captive portal.
-
- If "captive_portal" = "yourfilename.txt", then user-specified tests
- will be run.
-
- Any combination of the above tests can be run.
- """
-
- log.msg("")
- log.msg("Running vendor tests...")
- self.report['vendor_tests'] = yield threads.deferToThread(self.run_vendor_tests)
-
- log.msg("")
- log.msg("Running vendor DNS-based tests...")
- self.report['vendor_dns_tests'] = yield threads.deferToThread(self.run_vendor_dns_tests)
-
- log.msg("")
- log.msg("Checking that DNS requests are not being tampered...")
- self.report['check0x20'] = yield threads.deferToThread(self.check_0x20_to_auth_ns, 'ooni.nu')
-
- log.msg("")
- log.msg("Captive portal test finished!")
-
diff --git a/nettests/core/chinatrigger.py b/nettests/core/chinatrigger.py
deleted file mode 100644
index de1f64d..0000000
--- a/nettests/core/chinatrigger.py
+++ /dev/null
@@ -1,108 +0,0 @@
-import random
-import string
-import struct
-import time
-
-from twisted.python import usage
-from ooni.templates.scapyt import BaseScapyTest
-
-class UsageOptions(usage.Options):
- optParameters = [['dst', 'd', None, 'Specify the target address'],
- ['port', 'p', None, 'Specify the target port']
- ]
-
-class ChinaTriggerTest(BaseScapyTest):
- """
- This test is a OONI based implementation of the C tool written
- by Philipp Winter to engage chinese probes in active scanning.
-
- Example of running it:
- ./bin/ooniprobe chinatrigger -d 127.0.0.1 -p 8080
- """
-
- name = "chinatrigger"
- usageOptions = UsageOptions
- requiredOptions = ['dst', 'port']
- timeout = 2
-
- def setUp(self):
- self.dst = self.localOptions['dst']
- self.port = int(self.localOptions['port'])
-
- @staticmethod
- def set_random_servername(pkt):
- ret = pkt[:121]
- for i in range(16):
- ret += random.choice(string.ascii_lowercase)
- ret += pkt[121+16:]
- return ret
-
- @staticmethod
- def set_random_time(pkt):
- ret = pkt[:11]
- ret += struct.pack('!I', int(time.time()))
- ret += pkt[11+4:]
- return ret
-
- @staticmethod
- def set_random_field(pkt):
- ret = pkt[:15]
- for i in range(28):
- ret += chr(random.randint(0, 255))
- ret += pkt[15+28:]
- return ret
-
- @staticmethod
- def mutate(pkt, idx):
- """
- Slightly changed mutate function.
- """
- ret = pkt[:idx-1]
- mutation = chr(random.randint(0, 255))
- while mutation == pkt[idx]:
- mutation = chr(random.randint(0, 255))
- ret += mutation
- ret += pkt[idx:]
- return ret
-
- @staticmethod
- def set_all_random_fields(pkt):
- pkt = ChinaTriggerTest.set_random_servername(pkt)
- pkt = ChinaTriggerTest.set_random_time(pkt)
- pkt = ChinaTriggerTest.set_random_field(pkt)
- return pkt
-
- def test_send_mutations(self):
- from scapy.all import IP, TCP
- pkt = "\x16\x03\x01\x00\xcc\x01\x00\x00\xc8"\
- "\x03\x01\x4f\x12\xe5\x63\x3f\xef\x7d"\
- "\x20\xb9\x94\xaa\x04\xb0\xc1\xd4\x8c"\
- "\x50\xcd\xe2\xf9\x2f\xa9\xfb\x78\xca"\
- "\x02\xa8\x73\xe7\x0e\xa8\xf9\x00\x00"\
- "\x3a\xc0\x0a\xc0\x14\x00\x39\x00\x38"\
- "\xc0\x0f\xc0\x05\x00\x35\xc0\x07\xc0"\
- "\x09\xc0\x11\xc0\x13\x00\x33\x00\x32"\
- "\xc0\x0c\xc0\x0e\xc0\x02\xc0\x04\x00"\
- "\x04\x00\x05\x00\x2f\xc0\x08\xc0\x12"\
- "\x00\x16\x00\x13\xc0\x0d\xc0\x03\xfe"\
- "\xff\x00\x0a\x00\xff\x01\x00\x00\x65"\
- "\x00\x00\x00\x1d\x00\x1b\x00\x00\x18"\
- "\x77\x77\x77\x2e\x67\x6e\x6c\x69\x67"\
- "\x78\x7a\x70\x79\x76\x6f\x35\x66\x76"\
- "\x6b\x64\x2e\x63\x6f\x6d\x00\x0b\x00"\
- "\x04\x03\x00\x01\x02\x00\x0a\x00\x34"\
- "\x00\x32\x00\x01\x00\x02\x00\x03\x00"\
- "\x04\x00\x05\x00\x06\x00\x07\x00\x08"\
- "\x00\x09\x00\x0a\x00\x0b\x00\x0c\x00"\
- "\x0d\x00\x0e\x00\x0f\x00\x10\x00\x11"\
- "\x00\x12\x00\x13\x00\x14\x00\x15\x00"\
- "\x16\x00\x17\x00\x18\x00\x19\x00\x23"\
- "\x00\x00"
-
- pkt = ChinaTriggerTest.set_all_random_fields(pkt)
- pkts = [IP(dst=self.dst)/TCP(dport=self.port)/pkt]
- for x in range(len(pkt)):
- mutation = IP(dst=self.dst)/TCP(dport=self.port)/ChinaTriggerTest.mutate(pkt, x)
- pkts.append(mutation)
- return self.sr(pkts, timeout=2)
-
diff --git a/nettests/core/daphn3.py b/nettests/core/daphn3.py
deleted file mode 100644
index 09279fa..0000000
--- a/nettests/core/daphn3.py
+++ /dev/null
@@ -1,119 +0,0 @@
-# -*- encoding: utf-8 -*-
-from twisted.python import usage
-from twisted.internet import protocol, endpoints, reactor
-
-from ooni import nettest
-from ooni.kit import daphn3
-from ooni.utils import log
-
-class Daphn3ClientProtocol(daphn3.Daphn3Protocol):
- def nextStep(self):
- log.debug("Moving on to next step in the state walk")
- self.current_data_received = 0
- if self.current_step >= (len(self.steps) - 1):
- log.msg("Reached the end of the state machine")
- log.msg("Censorship fingerpint bisected!")
- step_idx, mutation_idx = self.factory.mutation
- log.msg("step_idx: %s | mutation_id: %s" % (step_idx, mutation_idx))
- #self.transport.loseConnection()
- if self.report:
- self.report['mutation_idx'] = mutation_idx
- self.report['step_idx'] = step_idx
- self.d.callback(None)
- return
- else:
- self.current_step += 1
- if self._current_step_role() == self.role:
- # We need to send more data because we are again responsible for
- # doing so.
- self.sendPayload()
-
-
-class Daphn3ClientFactory(protocol.ClientFactory):
- protocol = daphn3.Daphn3Protocol
- mutation = [0,0]
- steps = None
-
- def buildProtocol(self, addr):
- p = self.protocol()
- p.steps = self.steps
- p.factory = self
- return p
-
- def startedConnecting(self, connector):
- log.msg("Started connecting %s" % connector)
-
- def clientConnectionFailed(self, reason, connector):
- log.err("We failed connecting the the OONIB")
- log.err("Cannot perform test. Perhaps it got blocked?")
- log.err("Please report this to tor-assistants@xxxxxxxxxxxxxx")
-
- def clientConnectionLost(self, reason, connector):
- log.err("Daphn3 client connection lost")
- print reason
-
-class daphn3Args(usage.Options):
- optParameters = [
- ['host', 'h', '127.0.0.1', 'Target Hostname'],
- ['port', 'p', 57003, 'Target port number']]
-
- optFlags = [['pcap', 'c', 'Specify that the input file is a pcap file'],
- ['yaml', 'y', 'Specify that the input file is a YAML file (default)']]
-
-class daphn3Test(nettest.NetTestCase):
-
- name = "Daphn3"
- usageOptions = daphn3Args
- inputFile = ['file', 'f', None,
- 'Specify the pcap or YAML file to be used as input to the test']
-
- #requiredOptions = ['file']
-
- steps = None
-
- def inputProcessor(self, filename):
- """
- step_idx is the step in the packet exchange
- ex.
- [.X.] are packets sent by a client or a server
-
- client: [.1.] [.3.] [.4.]
- server: [.2.] [.5.]
-
- mutation_idx: is the sub index of the packet as in the byte of the
- packet at the step_idx that is to be mutated
-
- """
- if self.localOptions['pcap']:
- daphn3Steps = daphn3.read_pcap(filename)
- else:
- daphn3Steps = daphn3.read_yaml(filename)
- log.debug("Loaded these steps %s" % daphn3Steps)
- yield daphn3Steps
-
- def test_daphn3(self):
- host = self.localOptions['host']
- port = int(self.localOptions['port'])
-
- def failure(failure):
- log.msg("Failed to connect")
- self.report['censored'] = True
- self.report['mutation'] = 0
- raise Exception("Error in connection, perhaps the backend is censored")
- return
-
- def success(protocol):
- log.msg("Successfully connected")
- protocol.sendPayload()
- return protocol.d
-
- log.msg("Connecting to %s:%s" % (host, port))
- endpoint = endpoints.TCP4ClientEndpoint(reactor, host, port)
- daphn3_factory = Daphn3ClientFactory()
- daphn3_factory.steps = self.input
- daphn3_factory.report = self.report
- d = endpoint.connect(daphn3_factory)
- d.addErrback(failure)
- d.addCallback(success)
- return d
-
diff --git a/nettests/core/dnsspoof.py b/nettests/core/dnsspoof.py
deleted file mode 100644
index 5c50c2f..0000000
--- a/nettests/core/dnsspoof.py
+++ /dev/null
@@ -1,69 +0,0 @@
-from twisted.internet import defer
-from twisted.python import usage
-
-from scapy.all import IP, UDP, DNS, DNSQR
-
-from ooni.templates import scapyt
-from ooni.utils import log
-
-class UsageOptions(usage.Options):
- optParameters = [['resolver', 'r', None,
- 'Specify the resolver that should be used for DNS queries (ip:port)'],
- ['hostname', 'h', None,
- 'Specify the hostname of a censored site'],
- ['backend', 'b', '8.8.8.8:53',
- 'Specify the IP address of a good DNS resolver (ip:port)']
- ]
-
-
-class DNSSpoof(scapyt.ScapyTest):
- name = "DNS Spoof"
- timeout = 2
-
- usageOptions = UsageOptions
-
- requiredOptions = ['hostname', 'resolver']
-
- def setUp(self):
- self.resolverAddr, self.resolverPort = self.localOptions['resolver'].split(':')
- self.resolverPort = int(self.resolverPort)
-
- self.controlResolverAddr, self.controlResolverPort = self.localOptions['backend'].split(':')
- self.controlResolverPort = int(self.controlResolverPort)
-
- self.hostname = self.localOptions['hostname']
-
- def postProcessor(self, report):
- """
- This is not tested, but the concept is that if the two responses
- match up then spoofing is occuring.
- """
- try:
- test_answer = report['test_a_lookup']['answered_packets'][0][1]
- control_answer = report['test_control_a_lookup']['answered_packets'][0][1]
- except IndexError:
- self.report['spoofing'] = 'no_answer'
- return
-
- if test_answer[UDP] == control_answer[UDP]:
- self.report['spoofing'] = True
- else:
- self.report['spoofing'] = False
- return
-
- @defer.inlineCallbacks
- def test_a_lookup(self):
- question = IP(dst=self.resolverAddr)/UDP()/DNS(rd=1,
- qd=DNSQR(qtype="A", qclass="IN", qname=self.hostname))
- log.msg("Performing query to %s with %s:%s" % (self.hostname, self.resolverAddr, self.resolverPort))
- yield self.sr1(question)
-
- @defer.inlineCallbacks
- def test_control_a_lookup(self):
- question = IP(dst=self.controlResolverAddr)/UDP()/DNS(rd=1,
- qd=DNSQR(qtype="A", qclass="IN", qname=self.hostname))
- log.msg("Performing query to %s with %s:%s" % (self.hostname,
- self.controlResolverAddr, self.controlResolverPort))
- yield self.sr1(question)
-
-
diff --git a/nettests/core/http_body_length.py b/nettests/core/http_body_length.py
deleted file mode 100644
index 7c5b0d2..0000000
--- a/nettests/core/http_body_length.py
+++ /dev/null
@@ -1,90 +0,0 @@
-# -*- encoding: utf-8 -*-
-#
-# :authors: Arturo Filastò
-# :licence: see LICENSE
-
-from twisted.internet import defer
-from twisted.python import usage
-from ooni.templates import httpt
-
-class UsageOptions(usage.Options):
- optParameters = [
- ['url', 'u', None, 'Specify a single URL to test.'],
- ['factor', 'f', 0.8, 'What factor should be used for triggering censorship (0.8 == 80%)']
- ]
-
-class HTTPBodyLength(httpt.HTTPTest):
- """
- Performs a two GET requests to the set of sites to be tested for
- censorship, one over a known good control channel (Tor), the other over the
- test network.
- We then look at the response body lengths and see if the control response
- differs from the experiment response by a certain factor.
- """
- name = "HTTP Body length test"
- author = "Arturo Filastò"
- version = "0.1"
-
- usageOptions = UsageOptions
-
- inputFile = ['file', 'f', None,
- 'List of URLS to perform GET and POST requests to']
-
- # These values are used for determining censorship based on response body
- # lengths
- control_body_length = None
- experiment_body_length = None
-
- def setUp(self):
- """
- Check for inputs.
- """
- if self.input:
- self.url = self.input
- elif self.localOptions['url']:
- self.url = self.localOptions['url']
- else:
- raise Exception("No input specified")
-
- self.factor = self.localOptions['factor']
-
- def compare_body_lengths(self):
- body_length_a = self.control_body_length
- body_length_b = self.experiment_body_length
-
- rel = float(body_length_a)/float(body_length_b)
- if rel > 1:
- rel = 1/rel
-
- self.report['body_proportion'] = rel
- self.report['factor'] = self.factor
- if rel < self.factor:
- self.report['censorship'] = True
- else:
- self.report['censorship'] = False
-
- def test_get(self):
- def errback(failure):
- log.err("There was an error while testing %s" % self.url)
- log.exception(failure)
-
- def control_body(result):
- self.control_body_length = len(result)
- if self.experiment_body_length:
- self.compare_body_lengths()
-
- def experiment_body(result):
- self.experiment_body_length = len(result)
- if self.control_body_length:
- self.compare_body_lengths()
-
- dl = []
- experiment_request = self.doRequest(self.url, method="GET",
- body_processor=experiment_body)
- control_request = self.doRequest(self.url, method="GET",
- use_tor=True, body_processor=control_body)
- dl.append(experiment_request)
- dl.append(control_request)
- d = defer.DeferredList(dl)
- return d
-
diff --git a/nettests/core/http_header_field_manipulation.py b/nettests/core/http_header_field_manipulation.py
deleted file mode 100644
index 08ee8c7..0000000
--- a/nettests/core/http_header_field_manipulation.py
+++ /dev/null
@@ -1,181 +0,0 @@
-# -*- encoding: utf-8 -*-
-#
-# :authors: Arturo Filastò
-# :licence: see LICENSE
-
-import random
-import json
-import yaml
-
-from twisted.python import usage
-
-from ooni.utils import log, net, randomStr
-from ooni.templates import httpt
-from ooni.utils.txagentwithsocks import TrueHeaders
-
-def random_capitalization(string):
- output = ""
- original_string = string
- string = string.swapcase()
- for i in range(len(string)):
- if random.randint(0, 1):
- output += string[i].swapcase()
- else:
- output += string[i]
- if original_string == output:
- return random_capitalization(output)
- else:
- return output
-
-class UsageOptions(usage.Options):
- optParameters = [
- ['backend', 'b', 'http://127.0.0.1:57001',
- 'URL of the backend to use for sending the requests'],
- ['headers', 'h', None,
- 'Specify a yaml formatted file from which to read the request headers to send']
- ]
-
-class HTTPHeaderFieldManipulation(httpt.HTTPTest):
- """
- It performes HTTP requests with request headers that vary capitalization
- towards a backend. If we detect that the headers the backend received
- matches the ones we have sent then we have detected tampering.
- """
- name = "HTTP Header Field Manipulation"
- author = "Arturo Filastò"
- version = "0.1.3"
-
- randomizeUA = False
- usageOptions = UsageOptions
-
- requiredOptions = ['backend']
-
- def get_headers(self):
- headers = {}
- if self.localOptions['headers']:
- try:
- f = open(self.localOptions['headers'])
- except IOError:
- raise Exception("Specified input file does not exist")
- content = ''.join(f.readlines())
- f.close()
- headers = yaml.safe_load(content)
- return headers
- else:
- # XXX generate these from a random choice taken from whatheaders.com
- # http://s3.amazonaws.com/data.whatheaders.com/whatheaders-latest.xml.zip
- headers = {"User-Agent": [random.choice(net.userAgents)[0]],
- "Accept": ["text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8"],
- "Accept-Encoding": ["gzip,deflate,sdch"],
- "Accept-Language": ["en-US,en;q=0.8"],
- "Accept-Charset": ["ISO-8859-1,utf-8;q=0.7,*;q=0.3"],
- "Host": [randomStr(15)+'.com']
- }
- return headers
-
- def get_random_caps_headers(self):
- headers = {}
- normal_headers = self.get_headers()
- for k, v in normal_headers.items():
- new_key = random_capitalization(k)
- headers[new_key] = v
- return headers
-
- def processInputs(self):
- if self.localOptions['backend']:
- self.url = self.localOptions['backend']
- else:
- raise Exception("No backend specified")
-
- def processResponseBody(self, data):
- self.check_for_tampering(data)
-
- def check_for_tampering(self, data):
- """
- Here we do checks to verify if the request we made has been tampered
- with. We have 3 categories of tampering:
-
- * **total** when the response is not a json object and therefore we were not
- able to reach the ooniprobe test backend
-
- * **request_line_capitalization** when the HTTP Request line (e.x. GET /
- HTTP/1.1) does not match the capitalization we set.
-
- * **header_field_number** when the number of headers we sent does not match
- with the ones the backend received
-
- * **header_name_capitalization** when the header field names do not match
- those that we sent.
-
- * **header_field_value** when the header field value does not match with the
- one we transmitted.
- """
- self.report['tampering'] = {
- 'total': False,
- 'request_line_capitalization': False,
- 'header_name_capitalization': False,
- 'header_field_value': False,
- 'header_field_number': False
- }
- try:
- response = json.loads(data)
- except ValueError:
- self.report['tampering']['total'] = True
- return
-
- request_request_line = "%s / HTTP/1.1" % self.request_method
-
- try:
- response_request_line = response['request_line']
- response_headers_dict = response['headers_dict']
- except KeyError:
- self.report['tampering']['total'] = True
- return
-
- if request_request_line != response_request_line:
- self.report['tampering']['request_line_capitalization'] = True
-
- request_headers = TrueHeaders(self.request_headers)
- diff = request_headers.getDiff(response_headers_dict, ignore=['Connection'])
- if diff:
- self.report['tampering']['header_field_name'] = True
- else:
- self.report['tampering']['header_field_name'] = False
- self.report['tampering']['header_name_diff'] = list(diff)
-
- def test_get(self):
- self.request_method = "GET"
- self.request_headers = self.get_random_caps_headers()
- return self.doRequest(self.url, self.request_method,
- headers=self.request_headers)
-
- def test_get_random_capitalization(self):
- self.request_method = random_capitalization("GET")
- self.request_headers = self.get_random_caps_headers()
- return self.doRequest(self.url, self.request_method,
- headers=self.request_headers)
-
- def test_post(self):
- self.request_method = "POST"
- self.request_headers = self.get_headers()
- return self.doRequest(self.url, self.request_method,
- headers=self.request_headers)
-
- def test_post_random_capitalization(self):
- self.request_method = random_capitalization("POST")
- self.request_headers = self.get_random_caps_headers()
- return self.doRequest(self.url, self.request_method,
- headers=self.request_headers)
-
- def test_put(self):
- self.request_method = "PUT"
- self.request_headers = self.get_headers()
- return self.doRequest(self.url, self.request_method,
- headers=self.request_headers)
-
- def test_put_random_capitalization(self):
- self.request_method = random_capitalization("PUT")
- self.request_headers = self.get_random_caps_headers()
- return self.doRequest(self.url, self.request_method,
- headers=self.request_headers)
-
diff --git a/nettests/core/http_host.py b/nettests/core/http_host.py
deleted file mode 100644
index 3ebfd04..0000000
--- a/nettests/core/http_host.py
+++ /dev/null
@@ -1,109 +0,0 @@
-# -*- encoding: utf-8 -*-
-#
-# HTTP Host Test
-# **************
-#
-# :authors: Arturo Filastò
-# :licence: see LICENSE
-
-import json
-from twisted.python import usage
-
-from ooni.utils import log
-from ooni.templates import httpt
-
-class UsageOptions(usage.Options):
- optParameters = [['backend', 'b', 'http://127.0.0.1:57001',
- 'URL of the test backend to use'],
- ['content', 'c', None,
- 'The file to read from containing the content of a block page']]
-
-class HTTPHost(httpt.HTTPTest):
- """
- This test is aimed at detecting the presence of a transparent HTTP proxy
- and enumerating the sites that are being censored by it.
-
- It places inside of the Host header field the hostname of the site that is
- to be tested for censorship and then determines if the probe is behind a
- transparent HTTP proxy (because the response from the backend server does
- not match) and if the site is censorsed, by checking if the page that it
- got back matches the input block page.
- """
- name = "HTTP Host"
- author = "Arturo Filastò"
- version = "0.2"
-
- usageOptions = UsageOptions
-
- inputFile = ['file', 'f', None,
- 'List of hostnames to test for censorship']
-
- requiredOptions = ['backend']
-
- def test_send_host_header(self):
- """
- Stuffs the HTTP Host header field with the site to be tested for
- censorship and does an HTTP request of this kind to our backend.
-
- We randomize the HTTP User Agent headers.
- """
- headers = {}
- headers["Host"] = [self.input]
- return self.doRequest(self.localOptions['backend'],
- headers=headers)
-
- def check_for_censorship(self, body):
- """
- If we have specified what a censorship page looks like here we will
- check if the page we are looking at matches it.
-
- XXX this is not tested, though it is basically what was used to detect
- censorship in the palestine case.
- """
- if self.localOptions['content']:
- self.report['censored'] = True
-
- censorship_page = open(self.localOptions['content'])
- response_page = iter(body.split("\n"))
-
- for censorship_line in censorship_page.xreadlines():
- response_line = response_page.next()
- if response_line != censorship_line:
- self.report['censored'] = False
- break
-
- censorship_page.close()
-
- def processResponseBody(self, body):
- """
- XXX this is to be filled in with either a domclass based classified or
- with a rule that will allow to detect that the body of the result is
- that of a censored site.
- """
- # If we don't see a json array we know that something is wrong for
- # sure
- if not body.startswith("{"):
- self.report['transparent_http_proxy'] = True
- self.check_for_censorship(body)
- return
- try:
- content = json.loads(body)
- except:
- log.debug("The json does not parse, this is not what we expected")
- self.report['trans_http_proxy'] = True
- self.check_for_censorship(body)
- return
-
- # We base the determination of the presence of a transparent HTTP
- # proxy on the basis of the response containing the json that is to be
- # returned by a HTTP Request Test Helper
- if 'request_method' in content and \
- 'request_uri' in content and \
- 'request_headers' in content:
- log.debug("Found the keys I expected in %s" % content)
- self.report['trans_http_proxy'] = False
- else:
- log.debug("Did not find the keys I expected in %s" % content)
- self.report['trans_http_proxy'] = True
-
- self.check_for_censorship(body)
diff --git a/nettests/core/http_invalid_requests.py b/nettests/core/http_invalid_requests.py
deleted file mode 100644
index 7e6f47f..0000000
--- a/nettests/core/http_invalid_requests.py
+++ /dev/null
@@ -1,63 +0,0 @@
-# -*- encoding: utf-8 -*-
-from twisted.python import usage
-
-from ooni.utils import randomStr
-from ooni.templates import tcpt
-
-class UsageOptions(usage.Options):
- optParameters = [['backend', 'b', '127.0.0.1:57002',
- 'The OONI backend that runs a TCP echo server (must be on port 80)']]
-
- optFlags = [['nopayloadmatch', 'n',
- "Don't match the payload of the response. This option is used when you don't have a TCP echo server running"]]
-
-class HTTPInvalidRequests(tcpt.TCPTest):
- name = "HTTP Invalid Requests"
- version = "0.1.1"
- authors = "Arturo Filastò"
-
- inputFile = ['file', 'f', None,
- 'Input file of list of hostnames to attempt to resolve']
-
- usageOptions = UsageOptions
- requiredOptions = ['backend']
-
- def setUp(self):
- try:
- self.address, self.port = self.localOptions['backend'].split(":")
- self.port = int(self.port)
- except:
- raise usage.UsageError("Invalid backend address specified (must be address:port)")
-
- def test_random_invalid_request(self):
- """
- We test sending data to a TCP echo server, if what we get back is not
- what we have sent then there is tampering going on.
- This is for example what squid will return when performing such
- request:
-
- HTTP/1.0 400 Bad Request
- Server: squid/2.6.STABLE21
- Date: Sat, 23 Jul 2011 02:22:44 GMT
- Content-Type: text/html
- Content-Length: 1178
- Expires: Sat, 23 Jul 2011 02:22:44 GMT
- X-Squid-Error: ERR_INVALID_REQ 0
- X-Cache: MISS from cache_server
- X-Cache-Lookup: NONE from cache_server:3128
- Via: 1.0 cache_server:3128 (squid/2.6.STABLE21)
- Proxy-Connection: close
-
- """
- payload = randomStr(10) + "\n\r"
- def got_all_data(received_array):
- if not self.localOptions['nopayloadmatch']:
- first = received_array[0]
- if first != payload:
- self.report['tampering'] = True
- else:
- self.report['tampering'] = 'unknown'
-
- d = self.sendPayload(payload)
- d.addCallback(got_all_data)
- return d
diff --git a/nettests/core/http_keyword_filtering.py b/nettests/core/http_keyword_filtering.py
deleted file mode 100644
index 0ae9c52..0000000
--- a/nettests/core/http_keyword_filtering.py
+++ /dev/null
@@ -1,45 +0,0 @@
-# -*- encoding: utf-8 -*-
-#
-# :authors: Arturo Filastò
-# :licence: see LICENSE
-
-from twisted.python import usage
-
-from ooni.templates import httpt
-
-class UsageOptions(usage.Options):
- optParameters = [['backend', 'b', 'http://127.0.0.1:57001',
- 'URL of the test backend to use']]
-
-class HTTPKeywordFiltering(httpt.HTTPTest):
- """
- This test involves performing HTTP requests containing to be tested for
- censorship keywords.
-
- It does not detect censorship on the client, but just logs the response from the
- HTTP backend server.
- """
- name = "HTTP Keyword Filtering"
- author = "Arturo Filastò"
- version = "0.1.1"
-
- inputFile = ['file', 'f', None, 'List of keywords to use for censorship testing']
-
- usageOptions = UsageOptions
-
- requiredOptions = ['backend']
-
- def test_get(self):
- """
- Perform a HTTP GET request to the backend containing the keyword to be
- tested inside of the request body.
- """
- return self.doRequest(self.localOptions['backend'], method="GET", body=self.input)
-
- def test_post(self):
- """
- Perform a HTTP POST request to the backend containing the keyword to be
- tested inside of the request body.
- """
- return self.doRequest(self.localOptions['backend'], method="POST", body=self.input)
-
diff --git a/nettests/core/http_uk_mobile_networks.py b/nettests/core/http_uk_mobile_networks.py
deleted file mode 100644
index 784a9e9..0000000
--- a/nettests/core/http_uk_mobile_networks.py
+++ /dev/null
@@ -1,85 +0,0 @@
-# -*- encoding: utf-8 -*-
-import yaml
-
-from twisted.python import usage
-from twisted.plugin import IPlugin
-
-from ooni.templates import httpt
-from ooni.utils import log
-
-class UsageOptions(usage.Options):
- """
- See https://github.com/hellais/ooni-inputs/processed/uk_mobile_networks_redirects.yaml
- to see how the rules file should look like.
- """
- optParameters = [
- ['rules', 'y', None,
- 'Specify the redirect rules file ']
- ]
-
-class HTTPUKMobileNetworksTest(httpt.HTTPTest):
- """
- This test was thought of by Open Rights Group and implemented with the
- purpose of detecting censorship in the UK.
- For more details on this test see:
- https://trac.torproject.org/projects/tor/ticket/6437
- XXX port the knowledge from the trac ticket into this test docstring
- """
- name = "HTTP UK mobile network redirect test"
-
- usageOptions = UsageOptions
-
- followRedirects = True
-
- inputFile = ['urls', 'f', None, 'List of urls one per line to test for censorship']
- requiredOptions = ['urls']
-
- def testPattern(self, value, pattern, type):
- if type == 'eq':
- return value == pattern
- elif type == 're':
- import re
- if re.match(pattern, value):
- return True
- else:
- return False
- else:
- return None
-
- def testPatterns(self, patterns, location):
- test_result = False
-
- if type(patterns) == list:
- for pattern in patterns:
- test_result |= self.testPattern(location, pattern['value'], pattern['type'])
- rules_file = self.localOptions['rules']
-
- return test_result
-
- def testRules(self, rules, location):
- result = {}
- blocked = False
- for rule, value in rules.items():
- current_rule = {}
- current_rule['name'] = value['name']
- current_rule['patterns'] = value['patterns']
- current_rule['test'] = self.testPatterns(value['patterns'], location)
- blocked |= current_rule['test']
- result[rule] = current_rule
- result['blocked'] = blocked
- return result
-
- def processRedirect(self, location):
- self.report['redirect'] = None
- rules_file = self.localOptions['rules']
-
- fp = open(rules_file)
- rules = yaml.safe_load(fp)
- fp.close()
-
- log.msg("Testing rules %s" % rules)
- redirect = self.testRules(rules, location)
- self.report['redirect'] = redirect
-
-
-
diff --git a/nettests/core/http_url_list.py b/nettests/core/http_url_list.py
deleted file mode 100644
index 0accaae..0000000
--- a/nettests/core/http_url_list.py
+++ /dev/null
@@ -1,98 +0,0 @@
-# -*- encoding: utf-8 -*-
-#
-# :authors: Arturo Filastò
-# :licence: see LICENSE
-
-from twisted.internet import defer
-from twisted.python import usage
-from ooni.templates import httpt
-from ooni.utils import log
-
-class UsageOptions(usage.Options):
- optParameters = [['content', 'c', None,
- 'The file to read from containing the content of a block page'],
- ['url', 'u', None, 'Specify a single URL to test.']
- ]
-
-class HTTPURLList(httpt.HTTPTest):
- """
- Performs GET, POST and PUT requests to a list of URLs specified as
- input and checks if the page that we get back as a result matches that
- of a block page given as input.
-
- If no block page is given as input to the test it will simply collect the
- responses to the HTTP requests and write them to a report file.
- """
- name = "HTTP URL List"
- author = "Arturo Filastò"
- version = "0.1.3"
-
- usageOptions = UsageOptions
-
- inputFile = ['file', 'f', None,
- 'List of URLS to perform GET and POST requests to']
-
- def setUp(self):
- """
- Check for inputs.
- """
- if self.input:
- self.url = self.input
- elif self.localOptions['url']:
- self.url = self.localOptions['url']
- else:
- raise Exception("No input specified")
-
- def check_for_content_censorship(self, body):
- """
- If we have specified what a censorship page looks like here we will
- check if the page we are looking at matches it.
-
- XXX this is not tested, though it is basically what was used to detect
- censorship in the palestine case.
- """
- self.report['censored'] = True
-
- censorship_page = open(self.localOptions['content']).xreadlines()
- response_page = iter(body.split("\n"))
-
- # We first allign the two pages to the first HTML tag (something
- # starting with <). This is useful so that we can give as input to this
- # test something that comes from the output of curl -kis
- # http://the_page/
- for line in censorship_page:
- if line.strip().startswith("<"):
- break
- for line in response_page:
- if line.strip().startswith("<"):
- break
-
- for censorship_line in censorship_page:
- try:
- response_line = response_page.next()
- except StopIteration:
- # The censored page and the response we got do not match in
- # length.
- self.report['censored'] = False
- break
- censorship_line = censorship_line.replace("\n", "")
- if response_line != censorship_line:
- self.report['censored'] = False
-
- censorship_page.close()
-
- def processResponseBody(self, body):
- if self.localOptions['content']:
- log.msg("Checking for censorship in response body")
- self.check_for_content_censorship(body)
-
- def test_get(self):
- return self.doRequest(self.url, method="GET")
-
- def test_post(self):
- return self.doRequest(self.url, method="POST")
-
- def test_put(self):
- return self.doRequest(self.url, method="PUT")
-
-
diff --git a/nettests/core/keyword_filtering.py b/nettests/core/keyword_filtering.py
deleted file mode 100644
index 9eec4ff..0000000
--- a/nettests/core/keyword_filtering.py
+++ /dev/null
@@ -1,52 +0,0 @@
-# -*- encoding: utf-8 -*-
-#
-# :authors: Arturo Filastò
-# :licence: see LICENSE
-
-from twisted.python import usage
-from twisted.internet import defer
-
-from ooni.utils import log
-from ooni.templates import scapyt
-
-from scapy.all import *
-
-class UsageOptions(usage.Options):
- optParameters = [
- ['backend', 'b', '127.0.0.1:57002', 'Test backend running TCP echo'],
- ['timeout', 't', 5, 'Timeout after which to give up waiting for RST packets']
- ]
-
-class KeywordFiltering(scapyt.BaseScapyTest):
- name = "Keyword Filtering detection based on RST packets"
- author = "Arturo Filastò"
- version = "0.1"
-
- usageOptions = UsageOptions
-
- inputFile = ['file', 'f', None,
- 'List of keywords to use for censorship testing']
-
- def test_tcp_keyword_filtering(self):
- """
- Places the keyword to be tested in the payload of a TCP packet.
- XXX need to implement bisection method for enumerating keywords.
- though this should not be an issue since we are testing all
- the keywords in parallel.
- """
- def finished(packets):
- log.debug("Finished running TCP traceroute test on port %s" % port)
- answered, unanswered = packets
- self.report['rst_packets'] = []
- for snd, rcv in answered:
- # The received packet has the RST flag
- if rcv[TCP].flags == 4:
- self.report['rst_packets'].append(rcv)
-
- backend_ip, backend_port = self.localOptions['backend']
- keyword_to_test = str(self.input)
- packets = IP(dst=backend_ip,id=RandShort())/TCP(dport=backend_port)/keyword_to_test
- d = self.sr(packets, timeout=timeout)
- d.addCallback(finished)
- return d
-
diff --git a/nettests/core/parasitictraceroute.py b/nettests/core/parasitictraceroute.py
deleted file mode 100644
index 631c24b..0000000
--- a/nettests/core/parasitictraceroute.py
+++ /dev/null
@@ -1,129 +0,0 @@
-# -*- encoding: utf-8 -*-
-#
-# :authors: Arturo Filastò
-# :licence: see LICENSE
-
-from twisted.python import usage
-from twisted.internet import defer
-
-from ooni.templates import scapyt
-
-from scapy.all import *
-
-from ooni.utils import log
-
-class UsageOptions(usage.Options):
- optParameters = [['backend', 'b', 'google.com', 'Test backend to use'],
- ['timeout', 't', 5, 'The timeout for the traceroute test'],
- ['maxttl', 'm', 64, 'The maximum value of ttl to set on packets'],
- ['dstport', 'd', 80, 'Set the destination port of the traceroute test'],
- ['srcport', 'p', None, 'Set the source port to a specific value']]
-
-class ParasiticalTracerouteTest(scapyt.BaseScapyTest):
- name = "Parasitic TCP Traceroute Test"
- author = "Arturo Filastò"
- version = "0.1"
-
- usageOptions = UsageOptions
-
- def setUp(self):
- def get_sport():
- if self.localOptions['srcport']:
- return int(self.localOptions['srcport'])
- else:
- return random.randint(1024, 65535)
- self.get_sport = get_sport
-
- self.dst_ip = socket.gethostbyaddr(self.localOptions['backend'])[2][0]
-
- self.dport = int(self.localOptions['dstport'])
- self.max_ttl = int(self.localOptions['maxttl'])
-
- @defer.inlineCallbacks
- def test_parasitic_tcp_traceroute(self):
- """
- Establishes a TCP stream, then sequentially sends TCP packets with
- increasing TTL until we reach the ttl of the destination.
-
- Requires the backend to respond with an ACK to our SYN packet (i.e.
- the port must be open)
-
- XXX this currently does not work properly. The problem lies in the fact
- that we are currently using the scapy layer 3 socket. This socket makes
- packets received be trapped by the kernel TCP stack, therefore when we
- send out a SYN and get back a SYN-ACK the kernel stack will reply with
- a RST because it did not send a SYN.
-
- The quick fix to this would be to establish a TCP stream using socket
- calls and then "cannibalizing" the TCP session with scapy.
-
- The real fix is to make scapy use libpcap instead of raw sockets
- obviously as we previously did... arg.
- """
- sport = self.get_sport()
- dport = self.dport
- ipid = int(RandShort())
-
- ip_layer = IP(dst=self.dst_ip,
- id=ipid, ttl=self.max_ttl)
-
- syn = ip_layer/TCP(sport=sport, dport=dport, flags="S", seq=0)
-
- log.msg("Sending...")
- syn.show2()
-
- synack = yield self.sr1(syn)
-
- log.msg("Got response...")
- synack.show2()
-
- if not synack:
- log.err("Got no response. Try increasing max_ttl")
- return
-
- if synack[TCP].flags == 11:
- log.msg("Got back a FIN ACK. The destination port is closed")
- return
-
- elif synack[TCP].flags == 18:
- log.msg("Got a SYN ACK. All is well.")
- else:
- log.err("Got an unexpected result")
- return
-
- ack = ip_layer/TCP(sport=synack.dport,
- dport=dport, flags="A",
- seq=synack.ack, ack=synack.seq + 1)
-
- yield self.send(ack)
-
- self.report['hops'] = []
- # For the time being we make the assumption that we are NATted and
- # that the NAT will forward the packet to the destination even if the TTL has
- for ttl in range(1, self.max_ttl):
- log.msg("Sending packet with ttl of %s" % ttl)
- ip_layer.ttl = ttl
- empty_tcp_packet = ip_layer/TCP(sport=synack.dport,
- dport=dport, flags="A",
- seq=synack.ack, ack=synack.seq + 1)
-
- answer = yield self.sr1(empty_tcp_packet)
- if not answer:
- log.err("Got no response for ttl %s" % ttl)
- continue
-
- try:
- icmp = answer[ICMP]
- report = {'ttl': empty_tcp_packet.ttl,
- 'address': answer.src,
- 'rtt': answer.time - empty_tcp_packet.time
- }
- log.msg("%s: %s" % (dport, report))
- self.report['hops'].append(report)
-
- except IndexError:
- if answer.src == self.dst_ip:
- answer.show()
- log.msg("Reached the destination. We have finished the traceroute")
- return
-
diff --git a/nettests/core/squid.py b/nettests/core/squid.py
deleted file mode 100644
index 777bc3e..0000000
--- a/nettests/core/squid.py
+++ /dev/null
@@ -1,117 +0,0 @@
-# -*- encoding: utf-8 -*-
-#
-# Squid transparent HTTP proxy detector
-# *************************************
-#
-# :authors: Arturo Filastò
-# :licence: see LICENSE
-
-from ooni import utils
-from ooni.utils import log
-from ooni.templates import httpt
-
-class SquidTest(httpt.HTTPTest):
- """
- This test aims at detecting the presence of a squid based transparent HTTP
- proxy. It also tries to detect the version number.
- """
- name = "Squid test"
- author = "Arturo Filastò"
- version = "0.1"
-
- optParameters = [['backend', 'b', 'http://ooni.nu/test/', 'Test backend to use']]
-
- #inputFile = ['urls', 'f', None, 'Urls file']
- inputs =['http://google.com']
- def test_cacheobject(self):
- """
- This detects the presence of a squid transparent HTTP proxy by sending
- a request for cache_object://localhost/info.
-
- The response to this request will usually also contain the squid
- version number.
- """
- log.debug("Running")
- def process_body(body):
- if "Access Denied." in body:
- self.report['transparent_http_proxy'] = True
- else:
- self.report['transparent_http_proxy'] = False
-
- log.msg("Testing Squid proxy presence by sending a request for "\
- "cache_object")
- headers = {}
- #headers["Host"] = [self.input]
- self.report['trans_http_proxy'] = None
- method = "GET"
- body = "cache_object://localhost/info"
- return self.doRequest(self.localOptions['backend'], method=method, body=body,
- headers=headers, body_processor=process_body)
-
- def test_search_bad_request(self):
- """
- Attempts to perform a request with a random invalid HTTP method.
-
- If we are being MITMed by a Transparent Squid HTTP proxy we will get
- back a response containing the X-Squid-Error header.
- """
- def process_headers(headers):
- log.debug("Processing headers in test_search_bad_request")
- if 'X-Squid-Error' in headers:
- log.msg("Detected the presence of a transparent HTTP "\
- "squid proxy")
- self.report['trans_http_proxy'] = True
- else:
- log.msg("Did not detect the presence of transparent HTTP "\
- "squid proxy")
- self.report['transparent_http_proxy'] = False
-
- log.msg("Testing Squid proxy presence by sending a random bad request")
- headers = {}
- #headers["Host"] = [self.input]
- method = utils.randomSTR(10, True)
- self.report['transparent_http_proxy'] = None
- return self.doRequest(self.localOptions['backend'], method=method,
- headers=headers, headers_processor=process_headers)
-
- def test_squid_headers(self):
- """
- Detects the presence of a squid transparent HTTP proxy based on the
- response headers it adds to the responses to requests.
- """
- def process_headers(headers):
- """
- Checks if any of the headers that squid is known to add match the
- squid regexp.
-
- We are looking for something that looks like this:
-
- via: 1.0 cache_server:3128 (squid/2.6.STABLE21)
- x-cache: MISS from cache_server
- x-cache-lookup: MISS from cache_server:3128
- """
- squid_headers = {'via': r'.* \((squid.*)\)',
- 'x-cache': r'MISS from (\w+)',
- 'x-cache-lookup': r'MISS from (\w+:?\d+?)'
- }
-
- self.report['transparent_http_proxy'] = False
- for key in squid_headers.keys():
- if key in headers:
- log.debug("Found %s in headers" % key)
- m = re.search(squid_headers[key], headers[key])
- if m:
- log.msg("Detected the presence of squid transparent"\
- " HTTP Proxy")
- self.report['transparent_http_proxy'] = True
-
- log.msg("Testing Squid proxy by looking at response headers")
- headers = {}
- #headers["Host"] = [self.input]
- method = "GET"
- self.report['transparent_http_proxy'] = None
- d = self.doRequest(self.localOptions['backend'], method=method,
- headers=headers, headers_processor=process_headers)
- return d
-
-
diff --git a/nettests/core/tcpconnect.py b/nettests/core/tcpconnect.py
deleted file mode 100644
index d0a53f8..0000000
--- a/nettests/core/tcpconnect.py
+++ /dev/null
@@ -1,51 +0,0 @@
-# -*- encoding: utf-8 -*-
-from twisted.internet.protocol import Factory, Protocol
-from twisted.internet.endpoints import TCP4ClientEndpoint
-
-from twisted.internet.error import ConnectionRefusedError
-from twisted.internet.error import TCPTimedOutError
-
-from ooni import nettest
-from ooni.utils import log
-
-class TCPFactory(Factory):
- def buildProtocol(self, addr):
- return Protocol()
-
-class TCPConnectTest(nettest.NetTestCase):
- name = "TCP Connect"
- author = "Arturo Filastò"
- version = "0.1"
- inputFile = ['file', 'f', None,
- 'File containing the IP:PORT combinations to be tested, one per line']
-
- requiredOptions = ['file']
- def test_connect(self):
- """
- This test performs a TCP connection to the remote host on the specified port.
- the report will contains the string 'success' if the test has
- succeeded, or the reason for the failure if it has failed.
- """
- host, port = self.input.split(":")
- def connectionSuccess(protocol):
- protocol.transport.loseConnection()
- log.debug("Got a connection to %s" % self.input)
- self.report["connection"] = 'success'
-
- def connectionFailed(failure):
- failure.trap(ConnectionRefusedError, TCPTimedOutError)
- log.debug("Unable to connect to %s" % self.input)
- if isinstance(failure, ConnectionRefusedError):
- self.report["connection"] = 'refused'
- elif isinstance(failure, TCPTimedOutError):
- self.report["connection"] = 'timeout'
- else:
- self.report["connection"] = 'failed'
-
- from twisted.internet import reactor
- point = TCP4ClientEndpoint(reactor, host, int(port))
- d = point.connect(TCPFactory())
- d.addCallback(connectionSuccess)
- d.addErrback(connectionFailed)
- return d
-
diff --git a/nettests/core/traceroute.py b/nettests/core/traceroute.py
deleted file mode 100644
index f8311fd..0000000
--- a/nettests/core/traceroute.py
+++ /dev/null
@@ -1,137 +0,0 @@
-# -*- encoding: utf-8 -*-
-#
-# :authors: Arturo Filastò
-# :licence: see LICENSE
-
-from twisted.python import usage
-from twisted.internet import defer
-
-from ooni.templates import scapyt
-
-from scapy.all import *
-
-from ooni.utils import log
-
-class UsageOptions(usage.Options):
- optParameters = [
- ['backend', 'b', '8.8.8.8', 'Test backend to use'],
- ['timeout', 't', 5, 'The timeout for the traceroute test'],
- ['maxttl', 'm', 30, 'The maximum value of ttl to set on packets'],
- ['srcport', 'p', None, 'Set the source port to a specific value (only applies to TCP and UDP)']
- ]
-
-class TracerouteTest(scapyt.BaseScapyTest):
- name = "Multi Protocol Traceroute Test"
- author = "Arturo Filastò"
- version = "0.1.1"
-
- usageOptions = UsageOptions
- dst_ports = [22, 23, 53, 80, 123, 443]
-
- def setUp(self):
- def get_sport(protocol):
- if self.localOptions['srcport']:
- return int(self.localOptions['srcport'])
- else:
- return random.randint(1024, 65535)
-
- self.get_sport = get_sport
-
- def max_ttl_and_timeout(self):
- max_ttl = int(self.localOptions['maxttl'])
- timeout = int(self.localOptions['timeout'])
- self.report['max_ttl'] = max_ttl
- self.report['timeout'] = timeout
- return max_ttl, timeout
-
-
- def postProcessor(self, report):
- tcp_hops = report['test_tcp_traceroute']
- udp_hops = report['test_udp_traceroute']
- icmp_hops = report['test_icmp_traceroute']
-
-
- def test_tcp_traceroute(self):
- """
- Does a traceroute to the destination by sending TCP SYN packets
- with TTLs from 1 until max_ttl.
- """
- def finished(packets, port):
- log.debug("Finished running TCP traceroute test on port %s" % port)
- answered, unanswered = packets
- self.report['hops_'+str(port)] = []
- for snd, rcv in answered:
- report = {'ttl': snd.ttl,
- 'address': rcv.src,
- 'rtt': rcv.time - snd.time,
- 'sport': snd[UDP].sport
- }
- log.debug("%s: %s" % (port, report))
- self.report['hops_'+str(port)].append(report)
-
- dl = []
- max_ttl, timeout = self.max_ttl_and_timeout()
- for port in self.dst_ports:
- packets = IP(dst=self.localOptions['backend'],
- ttl=(1,max_ttl),id=RandShort())/TCP(flags=0x2, dport=port,
- sport=self.get_sport('tcp'))
-
- d = self.sr(packets, timeout=timeout)
- d.addCallback(finished, port)
- dl.append(d)
- return defer.DeferredList(dl)
-
- def test_udp_traceroute(self):
- """
- Does a traceroute to the destination by sending UDP packets with empty
- payloads with TTLs from 1 until max_ttl.
- """
- def finished(packets, port):
- log.debug("Finished running UDP traceroute test on port %s" % port)
- answered, unanswered = packets
- self.report['hops_'+str(port)] = []
- for snd, rcv in answered:
- report = {'ttl': snd.ttl,
- 'address': rcv.src,
- 'rtt': rcv.time - snd.time,
- 'sport': snd[UDP].sport
- }
- log.debug("%s: %s" % (port, report))
- self.report['hops_'+str(port)].append(report)
- dl = []
- max_ttl, timeout = self.max_ttl_and_timeout()
- for port in self.dst_ports:
- packets = IP(dst=self.localOptions['backend'],
- ttl=(1,max_ttl),id=RandShort())/UDP(dport=port,
- sport=self.get_sport('udp'))
-
- d = self.sr(packets, timeout=timeout)
- d.addCallback(finished, port)
- dl.append(d)
- return defer.DeferredList(dl)
-
- def test_icmp_traceroute(self):
- """
- Does a traceroute to the destination by sending ICMP echo request
- packets with TTLs from 1 until max_ttl.
- """
- def finished(packets):
- log.debug("Finished running ICMP traceroute test")
- answered, unanswered = packets
- self.report['hops'] = []
- for snd, rcv in answered:
- report = {'ttl': snd.ttl,
- 'address': rcv.src,
- 'rtt': rcv.time - snd.time
- }
- log.debug("%s" % (report))
- self.report['hops'].append(report)
- dl = []
- max_ttl, timeout = self.max_ttl_and_timeout()
- packets = IP(dst=self.localOptions['backend'],
- ttl=(1,max_ttl), id=RandShort())/ICMP()
-
- d = self.sr(packets, timeout=timeout)
- d.addCallback(finished)
- return d
-
diff --git a/nettests/experimental/bridge_reachability/bridget.py b/nettests/experimental/bridge_reachability/bridget.py
new file mode 100644
index 0000000..acf3dff
--- /dev/null
+++ b/nettests/experimental/bridge_reachability/bridget.py
@@ -0,0 +1,462 @@
+#!/usr/bin/env python
+# -*- encoding: utf-8 -*-
+#
+# +-----------+
+# | BRIDGET |
+# | +--------------------------------------------+
+# +--------| Use a Tor process to test making a Tor |
+# | connection to a list of bridges or relays. |
+# +--------------------------------------------+
+#
+# :authors: Isis Lovecruft, Arturo Filasto
+# :licence: see included LICENSE
+# :version: 0.1.0-alpha
+
+from __future__ import with_statement
+from functools import partial
+from random import randint
+
+import os
+import sys
+
+from twisted.python import usage
+from twisted.internet import defer, error, reactor
+
+from ooni import nettest
+
+from ooni.utils import log, date
+from ooni.utils.config import ValueChecker
+
+from ooni.utils.onion import TxtorconImportError
+from ooni.utils.onion import PTNoBridgesException, PTNotFoundException
+
+
+try:
+ from ooni.utils.onion import parse_data_dir
+except:
+ log.msg("Please go to /ooni/lib and do 'make txtorcon' to run this test!")
+
class MissingAssetException(Exception):
    """Raised when bridget is started with neither bridges nor relays to test."""
    pass
+
class RandomPortException(Exception):
    """Raised when using a random port conflicts with configured ports."""
    # NOTE(review): constructing this exception terminates the whole process
    # via sys.exit() instead of letting the raise propagate -- presumably
    # intentional for CLI usage, but confirm before reusing elsewhere.
    def __init__(self):
        log.msg("Unable to use random and specific ports simultaneously")
        return sys.exit()
+
class BridgetArgs(usage.Options):
    """Commandline options."""
    allowed = "Port to use for Tor's %s, must be between 1024 and 65535."
    sock_check = ValueChecker(allowed % "SocksPort").port_check
    ctrl_check = ValueChecker(allowed % "ControlPort").port_check

    # Entries are [long, short, default, description] -- the five-element
    # 'socks'/'control' entries additionally pass a ValueChecker hook as the
    # coercion/validation callable (their description is None).
    optParameters = [
        ['bridges', 'b', None,
         'File listing bridge IP:ORPorts to test'],
        ['relays', 'f', None,
         'File listing relay IPs to test'],
        ['socks', 's', 9049, None, sock_check],
        ['control', 'c', 9052, None, ctrl_check],
        ['torpath', 'p', None,
         'Path to the Tor binary to use'],
        ['datadir', 'd', None,
         'Tor DataDirectory to use'],
        ['transport', 't', None,
         'Tor ClientTransportPlugin'],
        ['resume', 'r', 0,
         'Resume at this index']]
    optFlags = [['random', 'x', 'Use random ControlPort and SocksPort']]

    def postOptions(self):
        """
        Validate option combinations after parsing.

        Raises MissingAssetException when neither --bridges nor --relays was
        given, PTNoBridgesException when a pluggable transport is requested
        without bridges, and RandomPortException when --random is combined
        with explicit --socks/--control ports.
        """
        if not self['bridges'] and not self['relays']:
            raise MissingAssetException(
                "Bridget can't run without bridges or relays to test!")
        if self['transport']:
            # NOTE(review): uid_check is called on the class with only a
            # message -- presumably it refuses to run as root; confirm
            # against ooni.utils.config.ValueChecker.
            ValueChecker.uid_check(
                "Can't run bridget as root with pluggable transports!")
            if not self['bridges']:
                raise PTNoBridgesException
        if self['socks'] or self['control']:
            if self['random']:
                raise RandomPortException
        if self['datadir']:
            ValueChecker.dir_check(self['datadir'])
        if self['torpath']:
            ValueChecker.file_check(self['torpath'])
+
+class BridgetTest(nettest.NetTestCase):
+ """
+ XXX fill me in
+
+ :ivar config:
+ An :class:`ooni.lib.txtorcon.TorConfig` instance.
+ :ivar relays:
+ A list of all provided relays to test.
+ :ivar bridges:
+ A list of all provided bridges to test.
+ :ivar socks_port:
+ Integer for Tor's SocksPort.
+ :ivar control_port:
+ Integer for Tor's ControlPort.
+ :ivar transport:
+ String defining the Tor's ClientTransportPlugin, for testing
+ a bridge's pluggable transport functionality.
+ :ivar tor_binary:
+ Path to the Tor binary to use, e.g. \'/usr/sbin/tor\'
+ """
+ name = "bridget"
+ author = "Isis Lovecruft <isis@xxxxxxxxxxxxxx>"
+ version = "0.1"
+ description = "Use a Tor process to test connecting to bridges or relays"
+ usageOptions = BridgetArgs
+
+ def setUp(self):
+ """
+ Extra initialization steps. We only want one child Tor process
+ running, so we need to deal with most of the TorConfig() only once,
+ before the experiment runs.
+ """
+ self.socks_port = 9049
+ self.control_port = 9052
+ self.circuit_timeout = 90
+ self.tor_binary = '/usr/sbin/tor'
+ self.data_directory = None
+
+ def read_from_file(filename):
+ log.msg("Loading information from %s ..." % opt)
+ with open(filename) as fp:
+ lst = []
+ for line in fp.readlines():
+ if line.startswith('#'):
+ continue
+ else:
+ lst.append(line.replace('\n',''))
+ return lst
+
+ def __count_remaining__(which):
+ total, reach, unreach = map(lambda x: which[x],
+ ['all', 'reachable', 'unreachable'])
+ count = len(total) - reach() - unreach()
+ return count
+
+ ## XXX should we do report['bridges_up'].append(self.bridges['current'])
+ self.bridges = {}
+ self.bridges['all'], self.bridges['up'], self.bridges['down'] = \
+ ([] for i in range(3))
+ self.bridges['reachable'] = lambda: len(self.bridges['up'])
+ self.bridges['unreachable'] = lambda: len(self.bridges['down'])
+ self.bridges['remaining'] = lambda: __count_remaining__(self.bridges)
+ self.bridges['current'] = None
+ self.bridges['pt_type'] = None
+ self.bridges['use_pt'] = False
+
+ self.relays = {}
+ self.relays['all'], self.relays['up'], self.relays['down'] = \
+ ([] for i in range(3))
+ self.relays['reachable'] = lambda: len(self.relays['up'])
+ self.relays['unreachable'] = lambda: len(self.relays['down'])
+ self.relays['remaining'] = lambda: __count_remaining__(self.relays)
+ self.relays['current'] = None
+
+ if self.localOptions:
+ try:
+ from txtorcon import TorConfig
+ except ImportError:
+ raise TxtorconImportError
+ else:
+ self.config = TorConfig()
+ finally:
+ options = self.localOptions
+
+ if options['bridges']:
+ self.config.UseBridges = 1
+ self.bridges['all'] = read_from_file(options['bridges'])
+ if options['relays']:
+ ## first hop must be in TorState().guards
+ # XXX where is this defined?
+ self.config.EntryNodes = ','.join(relay_list)
+ self.relays['all'] = read_from_file(options['relays'])
+ if options['socks']:
+ self.socks_port = options['socks']
+ if options['control']:
+ self.control_port = options['control']
+ if options['random']:
+ log.msg("Using randomized ControlPort and SocksPort ...")
+ self.socks_port = randint(1024, 2**16)
+ self.control_port = randint(1024, 2**16)
+ if options['torpath']:
+ self.tor_binary = options['torpath']
+ if options['datadir']:
+ self.data_directory = parse_data_dir(options['datadir'])
+ if options['transport']:
+ ## ClientTransportPlugin transport exec pathtobinary [options]
+ ## XXX we need a better way to deal with all PTs
+ log.msg("Using ClientTransportPlugin %s" % options['transport'])
+ self.bridges['use_pt'] = True
+ [self.bridges['pt_type'], pt_exec] = \
+ options['transport'].split(' ', 1)
+
+ if self.bridges['pt_type'] == "obfs2":
+ self.config.ClientTransportPlugin = \
+ self.bridges['pt_type'] + " " + pt_exec
+ else:
+ raise PTNotFoundException
+
+ self.config.SocksPort = self.socks_port
+ self.config.ControlPort = self.control_port
+ self.config.CookieAuthentication = 1
+
+ def test_bridget(self):
+ """
+ if bridges:
+ 1. configure first bridge line
+ 2a. configure data_dir, if it doesn't exist
+ 2b. write torrc to a tempfile in data_dir
+ 3. start tor } if any of these
+ 4. remove bridges which are public relays } fail, add current
+ 5. SIGHUP for each bridge } bridge to unreach-
+ } able bridges.
+ if relays:
+ 1a. configure the data_dir, if it doesn't exist
+ 1b. write torrc to a tempfile in data_dir
+ 2. start tor
+ 3. remove any of our relays which are already part of current
+ circuits
+ 4a. attach CustomCircuit() to self.state
+ 4b. RELAY_EXTEND for each relay } if this fails, add
+ } current relay to list
+ } of unreachable relays
+ 5.
+ if bridges and relays:
+ 1. configure first bridge line
+ 2a. configure data_dir if it doesn't exist
+ 2b. write torrc to a tempfile in data_dir
+ 3. start tor
+ 4. remove bridges which are public relays
+ 5. remove any of our relays which are already part of current
+ circuits
+ 6a. attach CustomCircuit() to self.state
+ 6b. for each bridge, build three circuits, with three
+ relays each
+ 6c. RELAY_EXTEND for each relay } if this fails, add
+ } current relay to list
+ } of unreachable relays
+
+ :param args:
+ The :class:`BridgetAsset` line currently being used. Except that it
+ in Bridget it doesn't, so it should be ignored and avoided.
+ """
+ try:
+ from ooni.utils import process
+ from ooni.utils.onion import remove_public_relays, start_tor
+ from ooni.utils.onion import start_tor_filter_nodes
+ from ooni.utils.onion import setup_fail, setup_done
+ from ooni.utils.onion import CustomCircuit
+ from ooni.utils.timer import deferred_timeout, TimeoutError
+ from ooni.lib.txtorcon import TorConfig, TorState
+ except ImportError:
+ raise TxtorconImportError
+ except TxtorconImportError, tie:
+ log.err(tie)
+ sys.exit()
+
+ def reconfigure_done(state, bridges):
+ """
+ Append :ivar:`bridges['current']` to the list
+ :ivar:`bridges['up'].
+ """
+ log.msg("Reconfiguring with 'Bridge %s' successful"
+ % bridges['current'])
+ bridges['up'].append(bridges['current'])
+ return state
+
+ def reconfigure_fail(state, bridges):
+ """
+ Append :ivar:`bridges['current']` to the list
+ :ivar:`bridges['down'].
+ """
+ log.msg("Reconfiguring TorConfig with parameters %s failed"
+ % state)
+ bridges['down'].append(bridges['current'])
+ return state
+
+ @defer.inlineCallbacks
+ def reconfigure_bridge(state, bridges):
+ """
+ Rewrite the Bridge line in our torrc. If use of pluggable
+ transports was specified, rewrite the line as:
+ Bridge <transport_type> <IP>:<ORPort>
+ Otherwise, rewrite in the standard form:
+ Bridge <IP>:<ORPort>
+
+ :param state:
+ A fully bootstrapped instance of
+ :class:`ooni.lib.txtorcon.TorState`.
+ :param bridges:
+ A dictionary of bridges containing the following keys:
+
+ bridges['remaining'] :: A function returning and int for the
+ number of remaining bridges to test.
+ bridges['current'] :: A string containing the <IP>:<ORPort>
+ of the current bridge.
+ bridges['use_pt'] :: A boolean, True if we're testing
+ bridges with a pluggable transport;
+ False otherwise.
+ bridges['pt_type'] :: If :ivar:`bridges['use_pt'] is True,
+ this is a string containing the type
+ of pluggable transport to test.
+ :return:
+ :param:`state`
+ """
+ log.msg("Current Bridge: %s" % bridges['current'])
+ log.msg("We now have %d bridges remaining to test..."
+ % bridges['remaining']())
+ try:
+ if bridges['use_pt'] is False:
+ controller_response = yield state.protocol.set_conf(
+ 'Bridge', bridges['current'])
+ elif bridges['use_pt'] and bridges['pt_type'] is not None:
+ controller_reponse = yield state.protocol.set_conf(
+ 'Bridge', bridges['pt_type'] +' '+ bridges['current'])
+ else:
+ raise PTNotFoundException
+
+ if controller_response == 'OK':
+ finish = yield reconfigure_done(state, bridges)
+ else:
+ log.err("SETCONF for %s responded with error:\n %s"
+ % (bridges['current'], controller_response))
+ finish = yield reconfigure_fail(state, bridges)
+
+ defer.returnValue(finish)
+
+ except Exception, e:
+ log.err("Reconfiguring torrc with Bridge line %s failed:\n%s"
+ % (bridges['current'], e))
+ defer.returnValue(None)
+
+ def attacher_extend_circuit(attacher, deferred, router):
+ ## XXX todo write me
+ ## state.attacher.extend_circuit
+ raise NotImplemented
+ #attacher.extend_circuit
+
+ def state_attach(state, path):
+ log.msg("Setting up custom circuit builder...")
+ attacher = CustomCircuit(state)
+ state.set_attacher(attacher, reactor)
+ state.add_circuit_listener(attacher)
+ return state
+
+ ## OLD
+ #for circ in state.circuits.values():
+ # for relay in circ.path:
+ # try:
+ # relay_list.remove(relay)
+ # except KeyError:
+ # continue
+ ## XXX how do we attach to circuits with bridges?
+ d = defer.Deferred()
+ attacher.request_circuit_build(d)
+ return d
+
+ def state_attach_fail(state):
+ log.err("Attaching custom circuit builder failed: %s" % state)
+
+ log.msg("Bridget: initiating test ... ") ## Start the experiment
+
+ ## if we've at least one bridge, and our config has no 'Bridge' line
+ if self.bridges['remaining']() >= 1 \
+ and not 'Bridge' in self.config.config:
+
+ ## configure our first bridge line
+ self.bridges['current'] = self.bridges['all'][0]
+ self.config.Bridge = self.bridges['current']
+ ## avoid starting several
+ self.config.save() ## processes
+ assert self.config.config.has_key('Bridge'), "No Bridge Line"
+
+ ## start tor and remove bridges which are public relays
+ from ooni.utils.onion import start_tor_filter_nodes
+ state = start_tor_filter_nodes(reactor, self.config,
+ self.control_port, self.tor_binary,
+ self.data_directory, self.bridges)
+ #controller = defer.Deferred()
+ #controller.addCallback(singleton_semaphore, tor)
+ #controller.addErrback(setup_fail)
+ #bootstrap = defer.gatherResults([controller, filter_bridges],
+ # consumeErrors=True)
+
+ if state is not None:
+ log.debug("state:\n%s" % state)
+ log.debug("Current callbacks on TorState():\n%s"
+ % state.callbacks)
+
+ ## if we've got more bridges
+ if self.bridges['remaining']() >= 2:
+ #all = []
+ for bridge in self.bridges['all'][1:]:
+ self.bridges['current'] = bridge
+ #new = defer.Deferred()
+ #new.addCallback(reconfigure_bridge, state, self.bridges)
+ #all.append(new)
+ #check_remaining = defer.DeferredList(all, consumeErrors=True)
+ #state.chainDeferred(check_remaining)
+ state.addCallback(reconfigure_bridge, self.bridges)
+
+ if self.relays['remaining']() > 0:
+ while self.relays['remaining']() >= 3:
+ #path = list(self.relays.pop() for i in range(3))
+ #log.msg("Trying path %s" % '->'.join(map(lambda node:
+ # node, path)))
+ self.relays['current'] = self.relays['all'].pop()
+ for circ in state.circuits.values():
+ for node in circ.path:
+ if node == self.relays['current']:
+ self.relays['up'].append(self.relays['current'])
+ if len(circ.path) < 3:
+ try:
+ ext = attacher_extend_circuit(state.attacher, circ,
+ self.relays['current'])
+ ext.addCallback(attacher_extend_circuit_done,
+ state.attacher, circ,
+ self.relays['current'])
+ except Exception, e:
+ log.err("Extend circuit failed: %s" % e)
+ else:
+ continue
+
+ #state.callback(all)
+ #self.reactor.run()
+ return state
+
+ def disabled_startTest(self, args):
+ """
+ Local override of :meth:`OONITest.startTest` to bypass calling
+ self.control.
+
+ :param args:
+ The current line of :class:`Asset`, not used but kept for
+ compatibility reasons.
+ :return:
+ A fired deferred which callbacks :meth:`experiment` and
+ :meth:`OONITest.finished`.
+ """
+ self.start_time = date.now()
+ self.d = self.experiment(args)
+ self.d.addErrback(log.err)
+ self.d.addCallbacks(self.finished, log.err)
+ return self.d
+
+## ISIS' NOTES
+## -----------
+## TODO:
+## x cleanup documentation
+## x add DataDirectory option
+## x check if bridges are public relays
+## o take bridge_desc file as input, also be able to give same
+## format as output
+## x Add asynchronous timeout for deferred, so that we don't wait
+## o Add asynchronous timeout for deferred, so that we don't wait
+## forever for bridges that don't work.
diff --git a/nettests/experimental/bridge_reachability/echo.py b/nettests/experimental/bridge_reachability/echo.py
new file mode 100644
index 0000000..d4033dd
--- /dev/null
+++ b/nettests/experimental/bridge_reachability/echo.py
@@ -0,0 +1,132 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+#
+# +---------+
+# | echo.py |
+# +---------+
+# A simple ICMP-8 ping test.
+#
+# @authors: Isis Lovecruft, <isis@xxxxxxxxxxxxxx>
+# @version: 0.0.2-pre-alpha
+# @license: copyright (c) 2012 Isis Lovecruft
+# see attached LICENCE file
+#
+
+import os
+import sys
+
+from twisted.python import usage
+from twisted.internet import reactor, defer
+from ooni import nettest
+from ooni.utils import log, net, Storage, txscapy
+
+try:
+ from scapy.all import IP, ICMP
+ from scapy.all import sr1
+ from ooni.lib import txscapy
+ from ooni.lib.txscapy import txsr, txsend
+ from ooni.templates.scapyt import BaseScapyTest
+except:
+ log.msg("This test requires scapy, see www.secdev.org/projects/scapy")
+
class UsageOptions(usage.Options):
    # [long, short, default, description(, coercer)] per twisted usage;
    # 'timeout' is given in seconds here (setUp converts it to ms).
    optParameters = [
        ['dst', 'd', None, 'Host IP to ping'],
        ['file', 'f', None, 'File of list of IPs to ping'],
        ['interface', 'i', None, 'Network interface to use'],
        ['count', 'c', 1, 'Number of packets to send', int],
        ['size', 's', 56, 'Number of bytes to send in ICMP data field', int],
        ['ttl', 'l', 25, 'Set the IP Time to Live', int],
        ['timeout', 't', 2, 'Seconds until timeout if no response', int],
        ['pcap', 'p', None, 'Save pcap to this file'],
        ['receive', 'r', True, 'Receive response packets']]
+
+class EchoTest(nettest.NetTestCase):
+ """
+ xxx fill me in
+ """
+ name = 'echo'
+ author = 'Isis Lovecruft <isis@xxxxxxxxxxxxxx>'
+ description = 'A simple ping test to see if a host is reachable.'
+ version = '0.0.2'
+ requiresRoot = True
+
+ usageOptions = UsageOptions
+ #requiredOptions = ['dst']
+
+ def setUp(self, *a, **kw):
+ self.destinations = {}
+
+ if self.localOptions:
+ for key, value in self.localOptions.items():
+ log.debug("setting self.%s = %s" % (key, value))
+ setattr(self, key, value)
+
+ self.timeout *= 1000 ## convert to milliseconds
+
+ if not self.interface:
+ try:
+ iface = txscapy.getDefaultIface()
+ except Exception, e:
+ log.msg("No network interface specified!")
+ log.err(e)
+ else:
+ log.msg("Using system default interface: %s" % iface)
+ self.interface = iface
+
+ if self.pcap:
+ try:
+ self.pcapfile = open(self.pcap, 'a+')
+ except:
+ log.msg("Unable to write to pcap file %s" % self.pcap)
+ else:
+ self.pcap = net.capturePacket(self.pcapfile)
+
+ if not self.dst:
+ if self.file:
+ self.dstProcessor(self.file)
+ for key, value in self.destinations.items():
+ for label, data in value.items():
+ if not 'ans' in data:
+ self.dst = label
+ else:
+ self.addDest(self.dst)
+ log.debug("self.dst is now: %s" % self.dst)
+
+ log.debug("Initialization of %s test completed." % self.name)
+
+ def addDest(self, dest):
+ d = dest.strip()
+ self.destinations[d] = {'dst_ip': d}
+
+ def dstProcessor(self, inputfile):
+ from ipaddr import IPAddress
+
+ if os.path.isfile(inputfile):
+ with open(inputfile) as f:
+ for line in f.readlines():
+ if line.startswith('#'):
+ continue
+ self.addDest(line)
+
+ def test_icmp(self):
+ def process_response(echo_reply, dest):
+ ans, unans = echo_reply
+ if ans:
+ log.msg("Recieved echo reply from %s: %s" % (dest, ans))
+ else:
+ log.msg("No reply was received from %s. Possible censorship event." % dest)
+ log.debug("Unanswered packets: %s" % unans)
+ self.report[dest] = echo_reply
+
+ for label, data in self.destinations.items():
+ reply = sr1(IP(dst=lebal)/ICMP())
+ process = process_reponse(reply, label)
+
+ #(ans, unans) = ping
+ #self.destinations[self.dst].update({'ans': ans,
+ # 'unans': unans,
+ # 'response_packet': ping})
+ #return ping
+
+ #return reply
diff --git a/nettests/experimental/chinatrigger.py b/nettests/experimental/chinatrigger.py
new file mode 100644
index 0000000..de1f64d
--- /dev/null
+++ b/nettests/experimental/chinatrigger.py
@@ -0,0 +1,108 @@
+import random
+import string
+import struct
+import time
+
+from twisted.python import usage
+from ooni.templates.scapyt import BaseScapyTest
+
class UsageOptions(usage.Options):
    # [long, short, default, description]; both options are required by
    # ChinaTriggerTest.requiredOptions.
    optParameters = [['dst', 'd', None, 'Specify the target address'],
                     ['port', 'p', None, 'Specify the target port']
                    ]
+
class ChinaTriggerTest(BaseScapyTest):
    """
    This test is a OONI based implementation of the C tool written
    by Philipp Winter to engage chinese probes in active scanning.

    Example of running it:
    ./bin/ooniprobe chinatrigger -d 127.0.0.1 -p 8080
    """

    name = "chinatrigger"
    usageOptions = UsageOptions
    requiredOptions = ['dst', 'port']
    timeout = 2

    def setUp(self):
        # Target host and port; both are required options.
        self.dst = self.localOptions['dst']
        self.port = int(self.localOptions['port'])

    @staticmethod
    def set_random_servername(pkt):
        """Replace the 16 bytes at offset 121 (presumably the SNI hostname
        of the hard-coded ClientHello below -- confirm) with random
        lowercase letters."""
        ret = pkt[:121]
        for i in range(16):
            ret += random.choice(string.ascii_lowercase)
        ret += pkt[121+16:]
        return ret

    @staticmethod
    def set_random_time(pkt):
        """Replace the 4 bytes at offset 11 (presumably gmt_unix_time --
        confirm) with the current unix time, big-endian."""
        ret = pkt[:11]
        ret += struct.pack('!I', int(time.time()))
        ret += pkt[11+4:]
        return ret

    @staticmethod
    def set_random_field(pkt):
        """Replace the 28 bytes at offset 15 (presumably the ClientHello
        random field -- confirm) with fresh random bytes."""
        ret = pkt[:15]
        for i in range(28):
            ret += chr(random.randint(0, 255))
        ret += pkt[15+28:]
        return ret

    @staticmethod
    def mutate(pkt, idx):
        """
        Return a copy of *pkt* with the single byte at *idx* replaced by a
        random byte that differs from the original byte.

        BUG FIX: the original sliced pkt[:idx-1] and pkt[idx:], which
        replaced byte idx-1 (while comparing the mutation against byte idx)
        and, for idx == 0, nearly doubled the packet.
        """
        ret = pkt[:idx]
        mutation = chr(random.randint(0, 255))
        while mutation == pkt[idx]:
            mutation = chr(random.randint(0, 255))
        ret += mutation
        ret += pkt[idx+1:]
        return ret

    @staticmethod
    def set_all_random_fields(pkt):
        """Randomize the servername, time and random fields of *pkt*."""
        pkt = ChinaTriggerTest.set_random_servername(pkt)
        pkt = ChinaTriggerTest.set_random_time(pkt)
        pkt = ChinaTriggerTest.set_random_field(pkt)
        return pkt

    def test_send_mutations(self):
        """
        Send the randomized ClientHello followed by every single-byte
        mutation of it to the target host/port.
        """
        from scapy.all import IP, TCP
        pkt = "\x16\x03\x01\x00\xcc\x01\x00\x00\xc8"\
              "\x03\x01\x4f\x12\xe5\x63\x3f\xef\x7d"\
              "\x20\xb9\x94\xaa\x04\xb0\xc1\xd4\x8c"\
              "\x50\xcd\xe2\xf9\x2f\xa9\xfb\x78\xca"\
              "\x02\xa8\x73\xe7\x0e\xa8\xf9\x00\x00"\
              "\x3a\xc0\x0a\xc0\x14\x00\x39\x00\x38"\
              "\xc0\x0f\xc0\x05\x00\x35\xc0\x07\xc0"\
              "\x09\xc0\x11\xc0\x13\x00\x33\x00\x32"\
              "\xc0\x0c\xc0\x0e\xc0\x02\xc0\x04\x00"\
              "\x04\x00\x05\x00\x2f\xc0\x08\xc0\x12"\
              "\x00\x16\x00\x13\xc0\x0d\xc0\x03\xfe"\
              "\xff\x00\x0a\x00\xff\x01\x00\x00\x65"\
              "\x00\x00\x00\x1d\x00\x1b\x00\x00\x18"\
              "\x77\x77\x77\x2e\x67\x6e\x6c\x69\x67"\
              "\x78\x7a\x70\x79\x76\x6f\x35\x66\x76"\
              "\x6b\x64\x2e\x63\x6f\x6d\x00\x0b\x00"\
              "\x04\x03\x00\x01\x02\x00\x0a\x00\x34"\
              "\x00\x32\x00\x01\x00\x02\x00\x03\x00"\
              "\x04\x00\x05\x00\x06\x00\x07\x00\x08"\
              "\x00\x09\x00\x0a\x00\x0b\x00\x0c\x00"\
              "\x0d\x00\x0e\x00\x0f\x00\x10\x00\x11"\
              "\x00\x12\x00\x13\x00\x14\x00\x15\x00"\
              "\x16\x00\x17\x00\x18\x00\x19\x00\x23"\
              "\x00\x00"

        pkt = ChinaTriggerTest.set_all_random_fields(pkt)
        pkts = [IP(dst=self.dst)/TCP(dport=self.port)/pkt]
        for x in range(len(pkt)):
            mutation = IP(dst=self.dst)/TCP(dport=self.port)/ChinaTriggerTest.mutate(pkt, x)
            pkts.append(mutation)
        return self.sr(pkts, timeout=2)
+
diff --git a/nettests/experimental/daphn3.py b/nettests/experimental/daphn3.py
new file mode 100644
index 0000000..09279fa
--- /dev/null
+++ b/nettests/experimental/daphn3.py
@@ -0,0 +1,119 @@
+# -*- encoding: utf-8 -*-
+from twisted.python import usage
+from twisted.internet import protocol, endpoints, reactor
+
+from ooni import nettest
+from ooni.kit import daphn3
+from ooni.utils import log
+
class Daphn3ClientProtocol(daphn3.Daphn3Protocol):
    def nextStep(self):
        """
        Advance the daphn3 state walk by one step, or finish.

        When the final step has been reached, the current mutation is the
        bisected censorship fingerprint: record its indices in the report
        and fire self.d.  Otherwise move to the next step and, if it is our
        turn in the exchange, send the next payload.
        """
        log.debug("Moving on to next step in the state walk")
        self.current_data_received = 0
        if self.current_step >= (len(self.steps) - 1):
            log.msg("Reached the end of the state machine")
            log.msg("Censorship fingerpint bisected!")
            step_idx, mutation_idx = self.factory.mutation
            log.msg("step_idx: %s | mutation_id: %s" % (step_idx, mutation_idx))
            #self.transport.loseConnection()
            if self.report:
                self.report['mutation_idx'] = mutation_idx
                self.report['step_idx'] = step_idx
            self.d.callback(None)
            return
        else:
            self.current_step += 1
            # _current_step_role()/sendPayload/role are inherited from
            # daphn3.Daphn3Protocol -- presumably role tells whether the
            # client or the server speaks at this step; TODO confirm.
            if self._current_step_role() == self.role:
                # We need to send more data because we are again responsible for
                # doing so.
                self.sendPayload()
+
+
class Daphn3ClientFactory(protocol.ClientFactory):
    # Protocol class instantiated for every connection.
    protocol = daphn3.Daphn3Protocol
    # [step_idx, mutation_idx] of the mutation currently being probed.
    # NOTE: class-level mutable default, shared across instances.
    mutation = [0,0]
    # The state-machine steps handed to every protocol instance.
    steps = None

    def buildProtocol(self, addr):
        """Build a protocol instance wired back to this factory."""
        p = self.protocol()
        p.steps = self.steps
        p.factory = self
        return p

    def startedConnecting(self, connector):
        log.msg("Started connecting %s" % connector)

    def clientConnectionFailed(self, reason, connector):
        """The TCP connection to the backend could not be established."""
        # BUG FIX: message read "connecting the the OONIB".
        log.err("We failed connecting to the OONIB")
        log.err("Cannot perform test. Perhaps it got blocked?")
        log.err("Please report this to tor-assistants@xxxxxxxxxxxxxx")

    def clientConnectionLost(self, reason, connector):
        """The connection dropped mid-test."""
        log.err("Daphn3 client connection lost")
        # was "print reason" -- use the ooni logger like the rest of
        # this file instead of writing to stdout.
        log.err(reason)
+
class daphn3Args(usage.Options):
    # [long, short, default, description] option parameters.
    optParameters = [
        ['host', 'h', '127.0.0.1', 'Target Hostname'],
        ['port', 'p', 57003, 'Target port number']]

    # NOTE(review): the short option 'h' for --host may collide with the
    # conventional help flag -- confirm twisted.python.usage behaviour.
    optFlags = [['pcap', 'c', 'Specify that the input file is a pcap file'],
                ['yaml', 'y', 'Specify that the input file is a YAML file (default)']]
+
class daphn3Test(nettest.NetTestCase):

    name = "Daphn3"
    usageOptions = daphn3Args
    inputFile = ['file', 'f', None,
                 'Specify the pcap or YAML file to be used as input to the test']

    #requiredOptions = ['file']

    steps = None

    def inputProcessor(self, filename):
        """
        step_idx is the step in the packet exchange
        ex.
        [.X.] are packets sent by a client or a server

        client: [.1.] [.3.] [.4.]
        server: [.2.] [.5.]

        mutation_idx: is the sub index of the packet as in the byte of the
        packet at the step_idx that is to be mutated

        """
        # Yields the whole step list as a single input item, so one test
        # run receives the complete state walk.
        if self.localOptions['pcap']:
            daphn3Steps = daphn3.read_pcap(filename)
        else:
            daphn3Steps = daphn3.read_yaml(filename)
        log.debug("Loaded these steps %s" % daphn3Steps)
        yield daphn3Steps

    def test_daphn3(self):
        """Connect to the backend and walk the daphn3 state machine."""
        host = self.localOptions['host']
        port = int(self.localOptions['port'])

        def failure(failure):
            # Mark the run as censored; the raise keeps the deferred in the
            # error state (the trailing return is unreachable).
            log.msg("Failed to connect")
            self.report['censored'] = True
            self.report['mutation'] = 0
            raise Exception("Error in connection, perhaps the backend is censored")
            return

        def success(protocol):
            log.msg("Successfully connected")
            protocol.sendPayload()
            return protocol.d

        log.msg("Connecting to %s:%s" % (host, port))
        endpoint = endpoints.TCP4ClientEndpoint(reactor, host, port)
        daphn3_factory = Daphn3ClientFactory()
        daphn3_factory.steps = self.input
        daphn3_factory.report = self.report
        d = endpoint.connect(daphn3_factory)
        # NOTE(review): errback attached before callback, so when `failure`
        # re-raises, `success` is skipped -- presumably intentional, but
        # compare with addCallbacks(success, failure) before relying on it.
        d.addErrback(failure)
        d.addCallback(success)
        return d
+
diff --git a/nettests/experimental/http_keyword_filtering.py b/nettests/experimental/http_keyword_filtering.py
new file mode 100644
index 0000000..0ae9c52
--- /dev/null
+++ b/nettests/experimental/http_keyword_filtering.py
@@ -0,0 +1,45 @@
+# -*- encoding: utf-8 -*-
+#
+# :authors: Arturo Filastò
+# :licence: see LICENSE
+
+from twisted.python import usage
+
+from ooni.templates import httpt
+
class UsageOptions(usage.Options):
    # [long, short, default, description]: URL of the measurement backend
    # the keyword-carrying requests are sent to.
    optParameters = [['backend', 'b', 'http://127.0.0.1:57001',
                      'URL of the test backend to use']]
+
class HTTPKeywordFiltering(httpt.HTTPTest):
    """
    This test involves performing HTTP requests that contain keywords to be
    tested for censorship.

    It does not detect censorship on the client, but just logs the response from the
    HTTP backend server.
    """
    name = "HTTP Keyword Filtering"
    author = "Arturo Filastò"
    version = "0.1.1"

    # Input file: one keyword per line; each line becomes self.input.
    inputFile = ['file', 'f', None, 'List of keywords to use for censorship testing']

    usageOptions = UsageOptions

    requiredOptions = ['backend']

    def test_get(self):
        """
        Perform a HTTP GET request to the backend containing the keyword to be
        tested inside of the request body.
        """
        return self.doRequest(self.localOptions['backend'], method="GET", body=self.input)

    def test_post(self):
        """
        Perform a HTTP POST request to the backend containing the keyword to be
        tested inside of the request body.
        """
        return self.doRequest(self.localOptions['backend'], method="POST", body=self.input)
+
diff --git a/nettests/experimental/http_uk_mobile_networks.py b/nettests/experimental/http_uk_mobile_networks.py
new file mode 100644
index 0000000..784a9e9
--- /dev/null
+++ b/nettests/experimental/http_uk_mobile_networks.py
@@ -0,0 +1,85 @@
+# -*- encoding: utf-8 -*-
+import yaml
+
+from twisted.python import usage
+from twisted.plugin import IPlugin
+
+from ooni.templates import httpt
+from ooni.utils import log
+
class UsageOptions(usage.Options):
    """
    See https://github.com/hellais/ooni-inputs/processed/uk_mobile_networks_redirects.yaml
    to see what the rules file should look like.
    """
    # [long, short, default, description]: YAML file of redirect rules.
    optParameters = [
        ['rules', 'y', None,
         'Specify the redirect rules file ']
    ]
+
class HTTPUKMobileNetworksTest(httpt.HTTPTest):
    """
    This test was thought of by Open Rights Group and implemented with the
    purpose of detecting censorship in the UK.
    For more details on this test see:
    https://trac.torproject.org/projects/tor/ticket/6437
    XXX port the knowledge from the trac ticket into this test docstring
    """
    name = "HTTP UK mobile network redirect test"

    usageOptions = UsageOptions

    followRedirects = True

    inputFile = ['urls', 'f', None, 'List of urls one per line to test for censorship']
    requiredOptions = ['urls']

    def testPattern(self, value, pattern, type):
        """
        Match *value* against *pattern*.

        type is 'eq' for exact equality or 're' for a regular-expression
        match; any other type yields None.  (The parameter name shadows the
        builtin but is kept for interface compatibility.)
        """
        if type == 'eq':
            return value == pattern
        elif type == 're':
            import re
            return bool(re.match(pattern, value))
        else:
            return None

    def testPatterns(self, patterns, location):
        """Return True when any pattern in *patterns* matches *location*."""
        test_result = False

        if type(patterns) == list:
            for pattern in patterns:
                # bool() guards against testPattern returning None for an
                # unknown pattern type, which would make |= raise TypeError.
                test_result |= bool(self.testPattern(location, pattern['value'], pattern['type']))
        # (removed a stray, unused "rules_file = ..." assignment here)

        return test_result

    def testRules(self, rules, location):
        """
        Evaluate every redirect rule against *location*.

        Returns a dict mapping rule id to {name, patterns, test}, plus a
        top-level 'blocked' flag that is True when any rule matched.
        """
        result = {}
        blocked = False
        for rule, value in rules.items():
            current_rule = {}
            current_rule['name'] = value['name']
            current_rule['patterns'] = value['patterns']
            current_rule['test'] = self.testPatterns(value['patterns'], location)
            blocked |= current_rule['test']
            result[rule] = current_rule
        result['blocked'] = blocked
        return result

    def processRedirect(self, location):
        """
        Match the redirect *location* against the rules file and store the
        outcome in the report.
        """
        self.report['redirect'] = None
        rules_file = self.localOptions['rules']

        # with-statement closes the rules file even if parsing fails
        # (the original leaked the handle on a yaml error).
        with open(rules_file) as fp:
            rules = yaml.safe_load(fp)

        log.msg("Testing rules %s" % rules)
        redirect = self.testRules(rules, location)
        self.report['redirect'] = redirect
+
+
diff --git a/nettests/experimental/keyword_filtering.py b/nettests/experimental/keyword_filtering.py
new file mode 100644
index 0000000..9eec4ff
--- /dev/null
+++ b/nettests/experimental/keyword_filtering.py
@@ -0,0 +1,52 @@
+# -*- encoding: utf-8 -*-
+#
+# :authors: Arturo Filastò
+# :licence: see LICENSE
+
+from twisted.python import usage
+from twisted.internet import defer
+
+from ooni.utils import log
+from ooni.templates import scapyt
+
+from scapy.all import *
+
class UsageOptions(usage.Options):
    # [long, short, default, description]; 'backend' is a "host:port"
    # string pointing at a TCP echo service, 'timeout' is in seconds.
    optParameters = [
        ['backend', 'b', '127.0.0.1:57002', 'Test backend running TCP echo'],
        ['timeout', 't', 5, 'Timeout after which to give up waiting for RST packets']
    ]
+
class KeywordFiltering(scapyt.BaseScapyTest):
    name = "Keyword Filtering detection based on RST packets"
    author = "Arturo Filastò"
    version = "0.1"

    usageOptions = UsageOptions

    # Input file: one keyword per line; each keyword becomes self.input.
    inputFile = ['file', 'f', None,
                 'List of keywords to use for censorship testing']

    def test_tcp_keyword_filtering(self):
        """
        Places the keyword to be tested in the payload of a TCP packet.
        XXX need to implement bisection method for enumerating keywords.
            though this should not be an issue since we are testing all
            the keywords in parallel.
        """
        def finished(packets):
            # BUG FIX: the original log line interpolated an undefined
            # name 'port' (copy/paste from the traceroute test).
            log.debug("Finished running TCP keyword filtering test")
            answered, unanswered = packets
            self.report['rst_packets'] = []
            for snd, rcv in answered:
                # The received packet has the RST flag (TCP flags value 4)
                if rcv[TCP].flags == 4:
                    self.report['rst_packets'].append(rcv)

        # BUG FIX: 'backend' is a "host:port" string; the original
        # unpacked the raw string (ValueError) and never converted the
        # port to int.
        backend_ip, backend_port = self.localOptions['backend'].split(':')
        backend_port = int(backend_port)
        # BUG FIX: 'timeout' was an undefined name (NameError); read it
        # from the commandline options.
        timeout = int(self.localOptions['timeout'])
        keyword_to_test = str(self.input)
        packets = IP(dst=backend_ip, id=RandShort())/TCP(dport=backend_port)/keyword_to_test
        d = self.sr(packets, timeout=timeout)
        d.addCallback(finished)
        return d
+
diff --git a/nettests/experimental/parasitictraceroute.py b/nettests/experimental/parasitictraceroute.py
new file mode 100644
index 0000000..631c24b
--- /dev/null
+++ b/nettests/experimental/parasitictraceroute.py
@@ -0,0 +1,129 @@
+# -*- encoding: utf-8 -*-
+#
+# :authors: Arturo Filastò
+# :licence: see LICENSE
+
+from twisted.python import usage
+from twisted.internet import defer
+
+from ooni.templates import scapyt
+
+from scapy.all import *
+
+from ooni.utils import log
+
class UsageOptions(usage.Options):
    """Command line options accepted by the parasitic traceroute test."""
    optParameters = [
        ['backend', 'b', 'google.com', 'Test backend to use'],
        ['timeout', 't', 5, 'The timeout for the traceroute test'],
        ['maxttl', 'm', 64, 'The maximum value of ttl to set on packets'],
        ['dstport', 'd', 80, 'Set the destination port of the traceroute test'],
        ['srcport', 'p', None, 'Set the source port to a specific value'],
    ]
+
class ParasiticalTracerouteTest(scapyt.BaseScapyTest):
    name = "Parasitic TCP Traceroute Test"
    author = "Arturo Filastò"
    version = "0.1"

    usageOptions = UsageOptions

    def setUp(self):
        """Resolve the backend address and read the traceroute options."""
        def get_sport():
            # Use the user supplied source port if given, otherwise pick a
            # random unprivileged one.
            if self.localOptions['srcport']:
                return int(self.localOptions['srcport'])
            else:
                return random.randint(1024, 65535)
        self.get_sport = get_sport

        # gethostbyname() is a plain forward lookup; the original used
        # gethostbyaddr(), which additionally performs a reverse lookup and
        # raises for hosts that have no PTR record.
        self.dst_ip = socket.gethostbyname(self.localOptions['backend'])

        self.dport = int(self.localOptions['dstport'])
        self.max_ttl = int(self.localOptions['maxttl'])

    @defer.inlineCallbacks
    def test_parasitic_tcp_traceroute(self):
        """
        Establishes a TCP stream, then sequentially sends TCP packets with
        increasing TTL until we reach the ttl of the destination.

        Requires the backend to respond with an ACK to our SYN packet (i.e.
        the port must be open)

        XXX this currently does not work properly. The problem lies in the fact
        that we are currently using the scapy layer 3 socket. This socket makes
        packets received be trapped by the kernel TCP stack, therefore when we
        send out a SYN and get back a SYN-ACK the kernel stack will reply with
        a RST because it did not send a SYN.

        The quick fix to this would be to establish a TCP stream using socket
        calls and then "cannibalizing" the TCP session with scapy.

        The real fix is to make scapy use libpcap instead of raw sockets
        obviously as we previously did... arg.
        """
        sport = self.get_sport()
        dport = self.dport
        ipid = int(RandShort())

        ip_layer = IP(dst=self.dst_ip,
                id=ipid, ttl=self.max_ttl)

        syn = ip_layer/TCP(sport=sport, dport=dport, flags="S", seq=0)

        log.msg("Sending...")
        syn.show2()

        synack = yield self.sr1(syn)

        # sr1() returns None on timeout; guard before touching the reply.
        # The original called synack.show2() first and crashed with an
        # AttributeError whenever no answer arrived.
        if not synack:
            log.err("Got no response. Try increasing max_ttl")
            return

        log.msg("Got response...")
        synack.show2()

        if synack[TCP].flags & 0x04:
            # An RST (usually RST/ACK, flags == 20) means the destination
            # port is closed. The original compared against 11, a value
            # that matches neither RST/ACK nor FIN/ACK (17).
            log.msg("Got back a RST. The destination port is closed")
            return

        elif synack[TCP].flags == 18:
            # 18 == SYN|ACK: the handshake can proceed.
            log.msg("Got a SYN ACK. All is well.")
        else:
            log.err("Got an unexpected result")
            return

        # synack.dport is the destination port of the *reply*, i.e. our
        # own source port.
        ack = ip_layer/TCP(sport=synack.dport,
                dport=dport, flags="A",
                seq=synack.ack, ack=synack.seq + 1)

        yield self.send(ack)

        self.report['hops'] = []
        # For the time being we make the assumption that we are NATted and
        # that the NAT will forward the packet to the destination even if the TTL has
        for ttl in range(1, self.max_ttl):
            log.msg("Sending packet with ttl of %s" % ttl)
            ip_layer.ttl = ttl
            empty_tcp_packet = ip_layer/TCP(sport=synack.dport,
                    dport=dport, flags="A",
                    seq=synack.ack, ack=synack.seq + 1)

            answer = yield self.sr1(empty_tcp_packet)
            if not answer:
                log.err("Got no response for ttl %s" % ttl)
                continue

            try:
                # Indexing the ICMP layer raises IndexError when the reply
                # came from the destination itself rather than from an
                # intermediate router (TTL exceeded).
                icmp = answer[ICMP]
                report = {'ttl': empty_tcp_packet.ttl,
                        'address': answer.src,
                        'rtt': answer.time - empty_tcp_packet.time
                        }
                log.msg("%s: %s" % (dport, report))
                self.report['hops'].append(report)

            except IndexError:
                if answer.src == self.dst_ip:
                    answer.show()
                    log.msg("Reached the destination. We have finished the traceroute")
                    return
+
diff --git a/nettests/experimental/squid.py b/nettests/experimental/squid.py
new file mode 100644
index 0000000..777bc3e
--- /dev/null
+++ b/nettests/experimental/squid.py
@@ -0,0 +1,117 @@
+# -*- encoding: utf-8 -*-
+#
+# Squid transparent HTTP proxy detector
+# *************************************
+#
+# :authors: Arturo Filastò
+# :licence: see LICENSE
+
+from ooni import utils
+from ooni.utils import log
+from ooni.templates import httpt
+
class SquidTest(httpt.HTTPTest):
    """
    This test aims at detecting the presence of a squid based transparent HTTP
    proxy. It also tries to detect the version number.
    """
    name = "Squid test"
    author = "Arturo Filastò"
    version = "0.1"

    optParameters = [['backend', 'b', 'http://ooni.nu/test/', 'Test backend to use']]

    #inputFile = ['urls', 'f', None, 'Urls file']
    inputs = ['http://google.com']

    def test_cacheobject(self):
        """
        This detects the presence of a squid transparent HTTP proxy by sending
        a request for cache_object://localhost/info.

        The response to this request will usually also contain the squid
        version number.
        """
        log.debug("Running")

        def process_body(body):
            # Squid answers cache_object requests it refuses with an
            # "Access Denied." page, which still reveals its presence.
            if "Access Denied." in body:
                self.report['transparent_http_proxy'] = True
            else:
                self.report['transparent_http_proxy'] = False

        log.msg("Testing Squid proxy presence by sending a request for "\
                "cache_object")
        headers = {}
        #headers["Host"] = [self.input]
        # The original initialized 'trans_http_proxy' while process_body
        # filled in 'transparent_http_proxy'; use a single key throughout.
        self.report['transparent_http_proxy'] = None
        method = "GET"
        body = "cache_object://localhost/info"
        return self.doRequest(self.localOptions['backend'], method=method, body=body,
                headers=headers, body_processor=process_body)

    def test_search_bad_request(self):
        """
        Attempts to perform a request with a random invalid HTTP method.

        If we are being MITMed by a Transparent Squid HTTP proxy we will get
        back a response containing the X-Squid-Error header.
        """
        def process_headers(headers):
            log.debug("Processing headers in test_search_bad_request")
            if 'X-Squid-Error' in headers:
                log.msg("Detected the presence of a transparent HTTP "\
                        "squid proxy")
                # Same report key as the other subtests (the original
                # mixed 'trans_http_proxy' and 'transparent_http_proxy',
                # so the positive result was written to the wrong key).
                self.report['transparent_http_proxy'] = True
            else:
                log.msg("Did not detect the presence of transparent HTTP "\
                        "squid proxy")
                self.report['transparent_http_proxy'] = False

        log.msg("Testing Squid proxy presence by sending a random bad request")
        headers = {}
        #headers["Host"] = [self.input]
        method = utils.randomSTR(10, True)
        self.report['transparent_http_proxy'] = None
        return self.doRequest(self.localOptions['backend'], method=method,
                headers=headers, headers_processor=process_headers)

    def test_squid_headers(self):
        """
        Detects the presence of a squid transparent HTTP proxy based on the
        response headers it adds to the responses to requests.
        """
        # This module never imports re at the top level; import it here so
        # the regexp matching below does not raise a NameError.
        import re

        def process_headers(headers):
            """
            Checks if any of the headers that squid is known to add match the
            squid regexp.

            We are looking for something that looks like this:

                via: 1.0 cache_server:3128 (squid/2.6.STABLE21)
                x-cache: MISS from cache_server
                x-cache-lookup: MISS from cache_server:3128
            """
            squid_headers = {'via': r'.* \((squid.*)\)',
                    'x-cache': r'MISS from (\w+)',
                    'x-cache-lookup': r'MISS from (\w+:?\d+?)'
                    }

            self.report['transparent_http_proxy'] = False
            for key in squid_headers.keys():
                if key in headers:
                    log.debug("Found %s in headers" % key)
                    m = re.search(squid_headers[key], headers[key])
                    if m:
                        log.msg("Detected the presence of squid transparent"\
                                " HTTP Proxy")
                        self.report['transparent_http_proxy'] = True

        log.msg("Testing Squid proxy by looking at response headers")
        headers = {}
        #headers["Host"] = [self.input]
        method = "GET"
        self.report['transparent_http_proxy'] = None
        d = self.doRequest(self.localOptions['backend'], method=method,
                headers=headers, headers_processor=process_headers)
        return d
+
+
diff --git a/nettests/manipulation/captiveportal.py b/nettests/manipulation/captiveportal.py
new file mode 100644
index 0000000..be8da27
--- /dev/null
+++ b/nettests/manipulation/captiveportal.py
@@ -0,0 +1,644 @@
+# -*- coding: utf-8 -*-
+"""
+ captiveportal
+ *************
+
+ This test is a collection of tests to detect the presence of a
+ captive portal. Code is taken, in part, from the old ooni-probe,
+ which was written by Jacob Appelbaum and Arturo Filastò.
+
+ This module performs multiple tests that match specific vendor captive
+ portal tests. This is a basic internet captive portal filter tester written
+ for RECon 2011.
+
+ Read the following URLs to understand the captive portal detection process
+ for various vendors:
+
+ http://technet.microsoft.com/en-us/library/cc766017%28WS.10%29.aspx
+ http://blog.superuser.com/2011/05/16/windows-7-network-awareness/
+ http://isc.sans.org/diary.html?storyid=10312&
+ http://src.chromium.org/viewvc/chrome?view=rev&revision=74608
+    http://code.google.com/p/chromium-os/issues/detail?id=3281
+ http://crbug.com/52489
+ http://crbug.com/71736
+ https://bugzilla.mozilla.org/show_bug.cgi?id=562917
+ https://bugzilla.mozilla.org/show_bug.cgi?id=603505
+ http://lists.w3.org/Archives/Public/ietf-http-wg/2011JanMar/0086.html
+ http://tools.ietf.org/html/draft-nottingham-http-portal-02
+
+ :copyright: (c) 2012 Isis Lovecruft
+ :license: see LICENSE for more details
+"""
+import base64
+import os
+import random
+import re
+import string
+import urllib2
+from urlparse import urlparse
+
+from twisted.python import usage
+from twisted.internet import defer, threads
+
+from ooni import nettest
+from ooni.templates import httpt
+from ooni.utils import net
+from ooni.utils import log
+
# dnspython is an optional dependency: when it is missing, resolver is set
# to None and the DNS based subtests below degrade gracefully (they log a
# message and return empty results) instead of crashing at import time.
try:
    from dns import resolver
except ImportError:
    print "The dnspython module was not found:"
    print "See https://crate.io/packages/dnspython/"
    resolver = None
+
+__plugoo__ = "captiveportal"
+__desc__ = "Captive portal detection test"
+
class UsageOptions(usage.Options):
    """Command line options accepted by the captive portal test."""
    optParameters = [
        ['asset', 'a', None, 'Asset file'],
        ['experiment-url', 'e', 'http://google.com/', 'Experiment URL'],
        ['user-agent', 'u', random.choice(net.userAgents),
         'User agent for HTTP requests'],
    ]
+
class CaptivePortal(nettest.NetTestCase):
    """
    Compares content and status codes of HTTP responses, and attempts
    to determine if content has been altered.
    """

    name = "captivep"
    description = "Captive Portal Test"
    version = '0.2'
    author = "Isis Lovecruft"
    usageOptions = UsageOptions

    def http_fetch(self, url, headers=None):
        """
        Parses an HTTP url, fetches it, and returns a urllib2 response
        object together with a dict of its response headers.
        """
        # urllib2 mutates the header mapping it is handed, so a shared
        # mutable default argument ({}) would leak headers between calls.
        if headers is None:
            headers = {}
        url = urlparse(url).geturl()
        request = urllib2.Request(url, None, headers)
        response = urllib2.urlopen(request)
        response_headers = dict(response.headers)
        return response, response_headers

    def http_content_match_fuzzy_opt(self, experimental_url, control_result,
            headers=None, fuzzy=False):
        """
        Makes an HTTP request on port 80 for experimental_url, then
        compares the response_content of experimental_url with the
        control_result. Optionally, if the fuzzy parameter is set to
        True, the response_content is compared with a regex of the
        control_result. If the response_content from the
        experimental_url and the control_result match, returns True
        with the HTTP status code and headers; False, status code, and
        headers if otherwise.
        """
        if headers is None:
            # NetTestCase exposes the parsed usageOptions as localOptions;
            # the original read the non-existent attribute local_options.
            default_ua = self.localOptions['user-agent']
            headers = {'User-Agent': default_ua}

        response, response_headers = self.http_fetch(experimental_url, headers)
        response_content = response.read()
        response_code = response.code
        if response_content is None:
            log.warn("HTTP connection appears to have failed.")
            return False, False, False

        if fuzzy:
            pattern = re.compile(control_result)
            match = pattern.search(response_content)
            log.msg("Fuzzy HTTP content comparison for experiment URL")
            log.msg("'%s'" % experimental_url)
            if not match:
                log.msg("does not match!")
                return False, response_code, response_headers
            else:
                log.msg("and the expected control result yielded a match.")
                return True, response_code, response_headers
        else:
            if str(response_content) != str(control_result):
                log.msg("HTTP content comparison of experiment URL")
                log.msg("'%s'" % experimental_url)
                log.msg("and the expected control result do not match.")
                return False, response_code, response_headers
            else:
                return True, response_code, response_headers

    def http_status_code_match(self, experiment_code, control_code):
        """
        Compare two HTTP status codes, returns True if they match.
        """
        return int(experiment_code) == int(control_code)

    def http_status_code_no_match(self, experiment_code, control_code):
        """
        Compare two HTTP status codes, returns True if they do not match.
        """
        return int(experiment_code) != int(control_code)

    def dns_resolve(self, hostname, nameserver=None):
        """
        Resolves hostname(s) though nameserver to corresponding
        address(es). hostname may be either a single hostname string,
        or a list of strings. If nameserver is not given, use local
        DNS resolver, and if that fails try using 8.8.8.8.
        """
        if not resolver:
            log.msg("dnspython is not installed.\
                    Cannot perform DNS Resolve test")
            return []
        if isinstance(hostname, str):
            hostname = [hostname]

        if nameserver is not None:
            res = resolver.Resolver(configure=False)
            res.nameservers = [nameserver]
        else:
            res = resolver.Resolver()

        response = []

        for hn in hostname:
            # Reset per hostname: the original kept a single 'answer'
            # variable across iterations and had a 'return' inside a
            # finally block, so one failed lookup aborted the whole batch
            # and a later failure could re-append the previous answer.
            answer = None
            try:
                answer = res.query(hn)
            except resolver.NXDOMAIN:
                log.msg("DNS resolution for %s returned NXDOMAIN" % hn)
                response.append('NXDOMAIN')
                continue
            except resolver.NoNameservers:
                # Local resolver unusable: retry this name via 8.8.8.8.
                res.nameservers = ['8.8.8.8']
                try:
                    answer = res.query(hn)
                except resolver.NXDOMAIN:
                    log.msg("DNS resolution for %s returned NXDOMAIN" % hn)
                    response.append('NXDOMAIN')
                    continue
            if answer is not None:
                for addr in answer:
                    response.append(addr.address)
        return response

    def dns_resolve_match(self, experiment_hostname, control_address):
        """
        Resolve experiment_hostname, and check to see that it returns
        an experiment_address which matches the control_address. If
        they match, returns True and experiment_address; otherwise
        returns False and experiment_address.
        """
        experiment_address = self.dns_resolve(experiment_hostname)
        if not experiment_address:
            log.debug("dns_resolve() for %s failed" % experiment_hostname)
            return None, experiment_address

        if len(set(experiment_address) & set([control_address])) > 0:
            return True, experiment_address
        else:
            log.msg("DNS comparison of control '%s' does not" % control_address)
            log.msg("match experiment response '%s'" % experiment_address)
            return False, experiment_address

    def get_auth_nameservers(self, hostname):
        """
        Many CPs set a nameserver to be used. Let's query that
        nameserver for the authoritative nameservers of hostname.

        The equivalent of:
        $ dig +short NS ooni.nu
        """
        if not resolver:
            log.msg("dnspython not installed.")
            log.msg("Cannot perform test.")
            return []

        res = resolver.Resolver()
        answer = res.query(hostname, 'NS')
        auth_nameservers = []
        for auth in answer:
            auth_nameservers.append(auth.to_text())
        return auth_nameservers

    def hostname_to_0x20(self, hostname):
        """
        MaKEs yOur HOsTnaME lOoK LiKE THis.

        For more information, see:
        D. Dagon, et. al. "Increased DNS Forgery Resistance
        Through 0x20-Bit Encoding". Proc. CSS, 2008.
        """
        hostname_0x20 = ''
        for char in hostname:
            l33t = random.choice(['caps', 'nocaps'])
            if l33t == 'caps':
                hostname_0x20 += char.capitalize()
            else:
                hostname_0x20 += char.lower()
        return hostname_0x20

    def check_0x20_to_auth_ns(self, hostname, sample_size=None):
        """
        Resolve a 0x20 DNS request for hostname over hostname's
        authoritative nameserver(s), and check to make sure that
        the capitalization in the 0x20 request matches that of the
        response. Also, check the serial numbers of the SOA (Start
        of Authority) records on the authoritative nameservers to
        make sure that they match.

        If sample_size is given, a random sample equal to that number
        of authoritative nameservers will be queried; default is 5.
        """
        log.msg("")
        log.msg("Testing random capitalization of DNS queries...")
        log.msg("Testing that Start of Authority serial numbers match...")

        auth_nameservers = self.get_auth_nameservers(hostname)

        if sample_size is None:
            sample_size = 5
        resolved = self.dns_resolve(auth_nameservers)
        # random.sample() raises ValueError when asked for more items than
        # the population holds; clamp to the number of resolved servers.
        sample_size = min(sample_size, len(resolved))
        resolved_auth_ns = random.sample(resolved, sample_size)

        querynames = []
        answernames = []
        serials = []

        # Even when gevent monkey patching is on, the requests here
        # are sent without being 0x20'd, so we need to 0x20 them.
        hostname = self.hostname_to_0x20(hostname)

        for auth_ns in resolved_auth_ns:
            res = resolver.Resolver(configure=False)
            res.nameservers = [auth_ns]
            try:
                answer = res.query(hostname, 'SOA')
            except resolver.Timeout:
                continue
            querynames.append(answer.qname.to_text())
            answernames.append(answer.rrset.name.to_text())
            for soa in answer:
                serials.append(str(soa.serial))

        if len(set(querynames).intersection(answernames)) == 1:
            log.msg("Capitalization in DNS queries and responses match.")
            name_match = True
        else:
            log.msg("The random capitalization '%s' used in" % hostname)
            log.msg("DNS queries to that hostname's authoritative")
            log.msg("nameservers does not match the capitalization in")
            log.msg("the response.")
            name_match = False

        if len(set(serials)) == 1:
            log.msg("Start of Authority serial numbers all match.")
            serial_match = True
        else:
            log.msg("Some SOA serial numbers did not match the rest!")
            serial_match = False

        ret = name_match, serial_match, querynames, answernames, serials

        if name_match and serial_match:
            log.msg("Your DNS queries do not appear to be tampered.")
            return ret
        elif name_match or serial_match:
            log.msg("Something is tampering with your DNS queries.")
            return ret
        elif not name_match and not serial_match:
            log.msg("Your DNS queries are definitely being tampered with.")
            return ret

    def get_random_url_safe_string(self, length):
        """
        Returns a random url-safe string of specified length, where
        0 < length <= 256. The returned string will always start with
        an alphabetic character.
        """
        if (length <= 0):
            length = 1
        elif (length > 256):
            length = 256

        random_ascii = base64.urlsafe_b64encode(os.urandom(int(length)))

        while not random_ascii[:1].isalpha():
            random_ascii = base64.urlsafe_b64encode(os.urandom(int(length)))

        # base64 expands the data by 4/3, so trim back to roughly the
        # requested length.
        three_quarters = int((len(random_ascii)) * (3.0/4.0))
        random_string = random_ascii[:three_quarters]
        return random_string

    def get_random_hostname(self, length=None):
        """
        Returns a random hostname with SLD of specified length. If
        length is unspecified, length=32 is used.

        These *should* all resolve to NXDOMAIN. If they actually
        resolve to a box that isn't part of a captive portal that
        would be rather interesting.
        """
        if length is None:
            length = 32

        random_sld = self.get_random_url_safe_string(length)

        # if it doesn't start with a letter, chuck it.
        while not random_sld[:1].isalpha():
            random_sld = self.get_random_url_safe_string(length)

        tld_list = ['.com', '.net', '.org', '.info', '.test', '.invalid']
        # random is imported at module scope; the original reached it
        # through the incidental urllib2.random attribute.
        random_tld = random.choice(tld_list)
        random_hostname = random_sld + random_tld
        return random_hostname

    def compare_random_hostnames(self, hostname_count=None, hostname_length=None):
        """
        Get hostname_count number of random hostnames with SLD length
        of hostname_length, and then attempt DNS resolution. If no
        arguments are given, default to three hostnames of 32 bytes
        each. These random hostnames *should* resolve to NXDOMAIN,
        except in the case where a user is presented with a captive
        portal and remains unauthenticated, in which case the captive
        portal may return the address of the authentication page.

        If the cardinality of the intersection of the set of resolved
        random hostnames and the single element control set
        (['NXDOMAIN']) are equal to one, then DNS properly resolved.

        Returns true if only NXDOMAINs were returned, otherwise returns
        False with the relative complement of the control set in the
        response set.
        """
        if hostname_count is None:
            hostname_count = 3

        log.msg("Generating random hostnames...")
        log.msg("Resolving DNS for %d random hostnames..." % hostname_count)

        control = ['NXDOMAIN']
        responses = []

        for x in range(hostname_count):
            random_hostname = self.get_random_hostname(hostname_length)
            response_match, response_address = self.dns_resolve_match(random_hostname,
                    control[0])
            for address in response_address:
                if response_match is False:
                    log.msg("Strangely, DNS resolution of the random hostname")
                    log.msg("%s actually points to %s"
                            % (random_hostname, response_address))
                responses = responses + [address]

        intersection = set(responses) & set(control)
        relative_complement = set(responses) - set(control)
        r = set(responses)

        # Branch order matters here: the original returned True whenever
        # NXDOMAIN appeared at all, which made the mixed NXDOMAIN/address
        # branch below unreachable dead code.
        if (len(intersection) == 1) and (len(r) == 1):
            log.msg("All %d random hostnames properly resolved to NXDOMAIN."
                    % hostname_count)
            return True, relative_complement
        elif (len(intersection) == 1) and (len(r) > 1):
            log.msg("Something odd happened. Some random hostnames correctly")
            log.msg("resolved to NXDOMAIN, but several others resolved to")
            log.msg("to the following addresses: %s" % relative_complement)
            return False, relative_complement
        elif (len(intersection) == 0) and (len(r) == 1):
            log.msg("All random hostnames resolved to the IP address ")
            log.msg("'%s', which is indicative of a captive portal." % r)
            return False, relative_complement
        else:
            log.debug("Apparently, pigs are flying on your network, 'cause a")
            log.debug("bunch of hostnames made from 32-byte random strings")
            log.debug("just magically resolved to a bunch of random addresses.")
            log.debug("That is definitely highly improbable. In fact, my napkin")
            log.debug("tells me that the probability of just one of those")
            log.debug("hostnames resolving to an address is 1.68e-59, making")
            log.debug("it nearly twice as unlikely as an MD5 hash collision.")
            log.debug("Either someone is seriously messing with your network,")
            log.debug("or else you are witnessing the impossible. %s" % r)
            return False, relative_complement

    def google_dns_cp_test(self):
        """
        Google Chrome resolves three 10-byte random hostnames.
        """
        log.msg("Running the Google Chrome DNS-based captive portal test...")

        gmatch, google_dns_result = self.compare_random_hostnames(3, 10)

        if gmatch:
            log.msg("Google Chrome DNS-based captive portal test did not")
            log.msg("detect a captive portal.")
            return google_dns_result
        else:
            log.msg("Google Chrome DNS-based captive portal test believes")
            log.msg("you are in a captive portal, or else something very")
            log.msg("odd is happening with your DNS.")
            return google_dns_result

    def ms_dns_cp_test(self):
        """
        Microsoft "phones home" to a server which will always resolve
        to the same address.
        """
        log.msg("")
        log.msg("Running the Microsoft NCSI DNS-based captive portal")
        log.msg("test...")

        msmatch, ms_dns_result = self.dns_resolve_match("dns.msftncsi.com",
                "131.107.255.255")
        if msmatch:
            log.msg("Microsoft NCSI DNS-based captive portal test did not")
            log.msg("detect a captive portal.")
            return ms_dns_result
        else:
            log.msg("Microsoft NCSI DNS-based captive portal test ")
            log.msg("believes you are in a captive portal.")
            return ms_dns_result

    def run_vendor_dns_tests(self):
        """
        Run the vendor DNS tests.
        """
        report = {}
        report['google_dns_cp'] = self.google_dns_cp_test()
        report['ms_dns_cp'] = self.ms_dns_cp_test()

        return report

    def run_vendor_tests(self, *a, **kw):
        """
        These are several vendor tests used to detect the presence of
        a captive portal. Each test compares HTTP status code and
        content to the control results and has its own User-Agent
        string, in order to emulate the test as it would occur on the
        device it was intended for. Vendor tests are defined in the
        format:
        [exp_url, ctrl_result, ctrl_code, ua, test_name]
        """
        vendor_tests = [['http://www.apple.com/library/test/success.html',
                'Success',
                '200',
                'Mozilla/5.0 (iPhone; U; CPU like Mac OS X; en) AppleWebKit/420+ (KHTML, like Gecko) Version/3.0 Mobile/1A543a Safari/419.3',
                'Apple HTTP Captive Portal'],
                ['http://tools.ietf.org/html/draft-nottingham-http-portal-02',
                '428 Network Authentication Required',
                '428',
                'Mozilla/5.0 (Windows NT 6.1; rv:5.0) Gecko/20100101 Firefox/5.0',
                'W3 Captive Portal'],
                ['http://www.msftncsi.com/ncsi.txt',
                'Microsoft NCSI',
                '200',
                'Microsoft NCSI',
                'MS HTTP Captive Portal',]]

        cm = self.http_content_match_fuzzy_opt
        sm = self.http_status_code_match
        snm = self.http_status_code_no_match

        def compare_content(status_func, fuzzy, experiment_url, control_result,
                control_code, headers, test_name):
            # One vendor probe: fetch the URL and compare both the body
            # (exact or fuzzy) and the status code against the controls.
            log.msg("")
            log.msg("Running the %s test..." % test_name)

            content_match, experiment_code, experiment_headers = cm(experiment_url,
                    control_result,
                    headers, fuzzy)
            status_match = status_func(experiment_code, control_code)

            if status_match and content_match:
                log.msg("The %s test was unable to detect" % test_name)
                log.msg("a captive portal.")
                return True
            else:
                log.msg("The %s test shows that your network" % test_name)
                log.msg("is filtered.")
                return False

        result = []
        for vt in vendor_tests:
            report = {}
            report['vt'] = vt

            experiment_url = vt[0]
            control_result = vt[1]
            control_code = vt[2]
            headers = {'User-Agent': vt[3]}
            test_name = vt[4]

            args = (experiment_url, control_result, control_code, headers, test_name)

            if test_name == "MS HTTP Captive Portal":
                report['result'] = compare_content(sm, False, *args)

            elif test_name == "Apple HTTP Captive Portal":
                report['result'] = compare_content(sm, True, *args)

            elif test_name == "W3 Captive Portal":
                report['result'] = compare_content(snm, True, *args)

            else:
                log.warn("Ooni is trying to run an undefined CP vendor test.")
            result.append(report)
        return result

    def control(self, experiment_result, args):
        """
        Compares the content and status code of the HTTP response for
        experiment_url with the control_result and control_code
        respectively. If the status codes match, but the experimental
        content and control_result do not match, fuzzy matching is enabled
        to determine if the control_result is at least included somewhere
        in the experimental content. Returns True if matches are found,
        and False if otherwise.
        """
        # XXX put this back to being parametrized
        #experiment_url = self.localOptions['experiment-url']
        experiment_url = 'http://google.com/'
        control_result = 'XX'
        control_code = 200

        cm = self.http_content_match_fuzzy_opt
        sm = self.http_status_code_match

        log.msg("Running test for '%s'..." % experiment_url)
        content_match, experiment_code, experiment_headers = cm(experiment_url,
                control_result)
        status_match = sm(experiment_code, control_code)
        if status_match and content_match:
            log.msg("The test for '%s'" % experiment_url)
            log.msg("was unable to detect a captive portal.")

            self.report['result'] = True

        elif status_match and not content_match:
            log.msg("Retrying '%s' with fuzzy match enabled."
                    % experiment_url)
            fuzzy_match, experiment_code, experiment_headers = cm(experiment_url,
                    control_result,
                    fuzzy=True)
            if fuzzy_match:
                self.report['result'] = True
            else:
                log.msg("Found modified content on '%s'," % experiment_url)
                log.msg("which could indicate a captive portal.")

                self.report['result'] = False
        else:
            log.msg("The content comparison test for ")
            log.msg("'%s'" % experiment_url)
            log.msg("shows that your HTTP traffic is filtered.")

            self.report['result'] = False

    @defer.inlineCallbacks
    def test_captive_portal(self):
        """
        Runs the CaptivePortal(Test).

        CONFIG OPTIONS
        --------------

        If "do_captive_portal_vendor_tests" is set to "true", then vendor
        specific captive portal HTTP-based tests will be run.

        If "do_captive_portal_dns_tests" is set to "true", then vendor
        specific captive portal DNS-based tests will be run.

        If "check_dns_requests" is set to "true", then Ooni-probe will
        attempt to check that your DNS requests are not being tampered with
        by a captive portal.

        If "captive_portal" = "yourfilename.txt", then user-specified tests
        will be run.

        Any combination of the above tests can be run.
        """
        # All of the subtests below use blocking urllib2/dnspython calls,
        # so each batch is pushed onto a thread to keep the reactor alive.
        log.msg("")
        log.msg("Running vendor tests...")
        self.report['vendor_tests'] = yield threads.deferToThread(self.run_vendor_tests)

        log.msg("")
        log.msg("Running vendor DNS-based tests...")
        self.report['vendor_dns_tests'] = yield threads.deferToThread(self.run_vendor_dns_tests)

        log.msg("")
        log.msg("Checking that DNS requests are not being tampered...")
        self.report['check0x20'] = yield threads.deferToThread(self.check_0x20_to_auth_ns, 'ooni.nu')

        log.msg("")
        log.msg("Captive portal test finished!")
+
diff --git a/nettests/manipulation/dnsspoof.py b/nettests/manipulation/dnsspoof.py
new file mode 100644
index 0000000..5c50c2f
--- /dev/null
+++ b/nettests/manipulation/dnsspoof.py
@@ -0,0 +1,69 @@
+from twisted.internet import defer
+from twisted.python import usage
+
+from scapy.all import IP, UDP, DNS, DNSQR
+
+from ooni.templates import scapyt
+from ooni.utils import log
+
class UsageOptions(usage.Options):
    """Command line options accepted by the DNS spoof test."""
    optParameters = [
        ['resolver', 'r', None,
         'Specify the resolver that should be used for DNS queries (ip:port)'],
        ['hostname', 'h', None,
         'Specify the hostname of a censored site'],
        ['backend', 'b', '8.8.8.8:53',
         'Specify the IP address of a good DNS resolver (ip:port)'],
    ]
+
+
class DNSSpoof(scapyt.ScapyTest):
    """
    Compares an A record lookup performed through the resolver under test
    with the same lookup performed through a known good resolver; matching
    answers from both suggest that responses are being spoofed on the path.
    """
    name = "DNS Spoof"
    # Seconds to wait for DNS answers before giving up.
    timeout = 2

    usageOptions = UsageOptions

    # Both of these must be supplied on the command line.
    requiredOptions = ['hostname', 'resolver']

    def setUp(self):
        # Split the "ip:port" option strings into an address and an
        # integer port for both the tested and the control resolver.
        self.resolverAddr, self.resolverPort = self.localOptions['resolver'].split(':')
        self.resolverPort = int(self.resolverPort)

        self.controlResolverAddr, self.controlResolverPort = self.localOptions['backend'].split(':')
        self.controlResolverPort = int(self.controlResolverPort)

        self.hostname = self.localOptions['hostname']

    def postProcessor(self, report):
        """
        This is not tested, but the concept is that if the two responses
        match up then spoofing is occurring.
        """
        # Take the first answered packet of each lookup; either query may
        # have received no answer at all, in which case indexing fails.
        try:
            test_answer = report['test_a_lookup']['answered_packets'][0][1]
            control_answer = report['test_control_a_lookup']['answered_packets'][0][1]
        except IndexError:
            self.report['spoofing'] = 'no_answer'
            return

        # Identical UDP payloads from two different resolvers indicate
        # that something on the path is forging the answers.
        if test_answer[UDP] == control_answer[UDP]:
            self.report['spoofing'] = True
        else:
            self.report['spoofing'] = False
        return

    @defer.inlineCallbacks
    def test_a_lookup(self):
        # A record query sent to the possibly censored resolver.
        question = IP(dst=self.resolverAddr)/UDP()/DNS(rd=1,
            qd=DNSQR(qtype="A", qclass="IN", qname=self.hostname))
        log.msg("Performing query to %s with %s:%s" % (self.hostname, self.resolverAddr, self.resolverPort))
        yield self.sr1(question)

    @defer.inlineCallbacks
    def test_control_a_lookup(self):
        # The same query sent to the known good resolver for comparison.
        question = IP(dst=self.controlResolverAddr)/UDP()/DNS(rd=1,
            qd=DNSQR(qtype="A", qclass="IN", qname=self.hostname))
        log.msg("Performing query to %s with %s:%s" % (self.hostname,
            self.controlResolverAddr, self.controlResolverPort))
        yield self.sr1(question)
+
+
diff --git a/nettests/manipulation/http_header_field_manipulation.py b/nettests/manipulation/http_header_field_manipulation.py
new file mode 100644
index 0000000..08ee8c7
--- /dev/null
+++ b/nettests/manipulation/http_header_field_manipulation.py
@@ -0,0 +1,181 @@
+# -*- encoding: utf-8 -*-
+#
+# :authors: Arturo Filastò
+# :licence: see LICENSE
+
+import random
+import json
+import yaml
+
+from twisted.python import usage
+
+from ooni.utils import log, net, randomStr
+from ooni.templates import httpt
+from ooni.utils.txagentwithsocks import TrueHeaders
+
def random_capitalization(string):
    """
    Return *string* with the case of each letter randomized, guaranteed to
    differ from the input whenever the input contains at least one cased
    character.

    Each character is independently kept or case-swapped with probability
    1/2; if the result happens to equal the input, we retry.

    If the string contains no cased characters at all (e.g. the empty
    string or digits only) it is returned unchanged: no distinct variant
    exists, and retrying would recurse forever.
    """
    # Guard: when swapcase() is the identity (no cased characters), the
    # original implementation always produced output == input and recursed
    # without bound. Bail out instead.
    if string == string.swapcase():
        return string
    output = ""
    original_string = string
    string = string.swapcase()
    for i in range(len(string)):
        if random.randint(0, 1):
            output += string[i].swapcase()
        else:
            output += string[i]
    if original_string == output:
        # Unlucky draw reproduced the input exactly; try again.
        return random_capitalization(output)
    else:
        return output
+
class UsageOptions(usage.Options):
    """Command line options accepted by the HTTP header field manipulation test."""
    optParameters = [
        ['backend', 'b', 'http://127.0.0.1:57001',
            'URL of the backend to use for sending the requests'],
        ['headers', 'h', None,
            'Specify a yaml formatted file from which to read the request headers to send'],
    ]
+
class HTTPHeaderFieldManipulation(httpt.HTTPTest):
    """
    Performs HTTP requests with request headers that vary capitalization
    towards a backend that echoes the request back. If the headers the
    backend received do not match the ones we sent, then something along
    the path has tampered with the request.
    """
    name = "HTTP Header Field Manipulation"
    author = "Arturo Filastò"
    version = "0.1.3"

    # We build the User-Agent ourselves in get_headers(), so the template
    # must not randomize it for us.
    randomizeUA = False
    usageOptions = UsageOptions

    requiredOptions = ['backend']

    def get_headers(self):
        """
        Return the request headers to send, as a dict mapping header names
        to lists of values.

        If the --headers option was given, the headers are read from that
        yaml file; otherwise a browser-like default set with a random
        User-Agent and a random Host is generated.

        Raises an Exception when the given headers file cannot be opened.
        """
        if self.localOptions['headers']:
            try:
                f = open(self.localOptions['headers'])
            except IOError:
                raise Exception("Specified input file does not exist")
            try:
                content = f.read()
            finally:
                # Close the file even if reading fails.
                f.close()
            return yaml.safe_load(content)
        else:
            # XXX generate these from a random choice taken from whatheaders.com
            # http://s3.amazonaws.com/data.whatheaders.com/whatheaders-latest.xml.zip
            return {"User-Agent": [random.choice(net.userAgents)[0]],
                    "Accept": ["text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8"],
                    "Accept-Encoding": ["gzip,deflate,sdch"],
                    "Accept-Language": ["en-US,en;q=0.8"],
                    "Accept-Charset": ["ISO-8859-1,utf-8;q=0.7,*;q=0.3"],
                    "Host": [randomStr(15)+'.com']
                    }

    def get_random_caps_headers(self):
        """
        Return the headers from get_headers() with each header *name*
        randomly re-capitalized.
        """
        headers = {}
        normal_headers = self.get_headers()
        for k, v in normal_headers.items():
            new_key = random_capitalization(k)
            headers[new_key] = v
        return headers

    def processInputs(self):
        """Use the backend option as the URL that all requests are sent to."""
        if self.localOptions['backend']:
            self.url = self.localOptions['backend']
        else:
            raise Exception("No backend specified")

    def processResponseBody(self, data):
        """
        Called by the template with the response body: run the tampering
        checks on the backend's echo of our request.
        """
        self.check_for_tampering(data)

    def check_for_tampering(self, data):
        """
        Here we do checks to verify if the request we made has been tampered
        with. We have the following categories of tampering:

        * **total** when the response is not a json object and therefore we were not
          able to reach the ooniprobe test backend

        * **request_line_capitalization** when the HTTP Request line (e.x. GET /
          HTTP/1.1) does not match the capitalization we set.

        * **header_field_number** when the number of headers we sent does not match
          with the ones the backend received

        * **header_name_capitalization** when the header field names do not match
          those that we sent.

        * **header_field_value** when the header field value does not match with the
          one we transmitted.
        """
        self.report['tampering'] = {
            'total': False,
            'request_line_capitalization': False,
            'header_name_capitalization': False,
            'header_field_value': False,
            'header_field_number': False
        }
        try:
            response = json.loads(data)
        except ValueError:
            # Not json at all: we never reached our own backend.
            self.report['tampering']['total'] = True
            return

        request_request_line = "%s / HTTP/1.1" % self.request_method

        try:
            response_request_line = response['request_line']
            response_headers_dict = response['headers_dict']
        except KeyError:
            # json, but not the echo format our backend produces.
            self.report['tampering']['total'] = True
            return

        if request_request_line != response_request_line:
            self.report['tampering']['request_line_capitalization'] = True

        request_headers = TrueHeaders(self.request_headers)
        diff = request_headers.getDiff(response_headers_dict,
                ignore=['Connection'])
        # Fix: the previous revision stored this result under an undeclared
        # 'header_field_name' key instead of the initialized and documented
        # 'header_name_capitalization' key, and recorded 'header_name_diff'
        # only when the diff was empty. Use the declared key and always
        # record the diff.
        self.report['tampering']['header_name_capitalization'] = bool(diff)
        self.report['tampering']['header_name_diff'] = list(diff)

    def test_get(self):
        """GET request with randomly capitalized header names."""
        self.request_method = "GET"
        self.request_headers = self.get_random_caps_headers()
        return self.doRequest(self.url, self.request_method,
                headers=self.request_headers)

    def test_get_random_capitalization(self):
        """GET with the method token itself randomly capitalized."""
        self.request_method = random_capitalization("GET")
        self.request_headers = self.get_random_caps_headers()
        return self.doRequest(self.url, self.request_method,
                headers=self.request_headers)

    def test_post(self):
        """POST request with normally capitalized headers."""
        self.request_method = "POST"
        self.request_headers = self.get_headers()
        return self.doRequest(self.url, self.request_method,
                headers=self.request_headers)

    def test_post_random_capitalization(self):
        """POST with the method token itself randomly capitalized."""
        self.request_method = random_capitalization("POST")
        self.request_headers = self.get_random_caps_headers()
        return self.doRequest(self.url, self.request_method,
                headers=self.request_headers)

    def test_put(self):
        """PUT request with normally capitalized headers."""
        self.request_method = "PUT"
        self.request_headers = self.get_headers()
        return self.doRequest(self.url, self.request_method,
                headers=self.request_headers)

    def test_put_random_capitalization(self):
        """PUT with the method token itself randomly capitalized."""
        self.request_method = random_capitalization("PUT")
        self.request_headers = self.get_random_caps_headers()
        return self.doRequest(self.url, self.request_method,
                headers=self.request_headers)
+
diff --git a/nettests/manipulation/http_host.py b/nettests/manipulation/http_host.py
new file mode 100644
index 0000000..3ebfd04
--- /dev/null
+++ b/nettests/manipulation/http_host.py
@@ -0,0 +1,109 @@
+# -*- encoding: utf-8 -*-
+#
+# HTTP Host Test
+# **************
+#
+# :authors: Arturo Filastò
+# :licence: see LICENSE
+
+import json
+from twisted.python import usage
+
+from ooni.utils import log
+from ooni.templates import httpt
+
class UsageOptions(usage.Options):
    """Command line options accepted by the HTTP Host test."""
    optParameters = [
        ['backend', 'b', 'http://127.0.0.1:57001',
            'URL of the test backend to use'],
        ['content', 'c', None,
            'The file to read from containing the content of a block page'],
    ]
+
class HTTPHost(httpt.HTTPTest):
    """
    This test is aimed at detecting the presence of a transparent HTTP proxy
    and enumerating the sites that are being censored by it.

    It places inside of the Host header field the hostname of the site that is
    to be tested for censorship and then determines if the probe is behind a
    transparent HTTP proxy (because the response from the backend server does
    not match) and if the site is censored, by checking if the page that it
    got back matches the input block page.
    """
    name = "HTTP Host"
    author = "Arturo Filastò"
    version = "0.2"

    usageOptions = UsageOptions

    inputFile = ['file', 'f', None,
            'List of hostnames to test for censorship']

    requiredOptions = ['backend']

    def test_send_host_header(self):
        """
        Stuffs the HTTP Host header field with the site to be tested for
        censorship and does an HTTP request of this kind to our backend.

        We randomize the HTTP User Agent headers.
        """
        headers = {}
        headers["Host"] = [self.input]
        return self.doRequest(self.localOptions['backend'],
                headers=headers)

    def check_for_censorship(self, body):
        """
        If we have specified what a censorship page looks like here we will
        check if the page we are looking at matches it.

        Sets self.report['censored'] to True only when every line of the
        block page matches the corresponding line of the response body.

        XXX this is not tested, though it is basically what was used to detect
        censorship in the palestine case.
        """
        if not self.localOptions['content']:
            return
        self.report['censored'] = True

        response_page = iter(body.split("\n"))
        # 'with' guarantees the block page file is closed on every path.
        with open(self.localOptions['content']) as censorship_page:
            for censorship_line in censorship_page:
                # Fix: file lines keep their trailing newline while lines
                # produced by split("\n") do not, so the previous equality
                # check could never succeed. Strip it before comparing (as
                # the http_url_list test already does).
                censorship_line = censorship_line.replace("\n", "")
                try:
                    response_line = next(response_page)
                except StopIteration:
                    # The response is shorter than the block page: they
                    # cannot match. The previous revision let StopIteration
                    # escape to the caller here.
                    self.report['censored'] = False
                    break
                if response_line != censorship_line:
                    self.report['censored'] = False
                    break

    def processResponseBody(self, body):
        """
        Decide whether the body is the raw json echoed back by the test
        helper backend (no transparent proxy) or something else (transparent
        proxy and possibly a block page).

        XXX this is to be filled in with either a domclass based classifier or
        with a rule that will allow to detect that the body of the result is
        that of a censored site.
        """
        # If we don't see a json object we know that something is wrong for
        # sure. NOTE: this key was previously 'transparent_http_proxy' in
        # this one branch; it is now 'trans_http_proxy' everywhere so that
        # the report uses a single consistent key.
        if not body.startswith("{"):
            self.report['trans_http_proxy'] = True
            self.check_for_censorship(body)
            return
        try:
            content = json.loads(body)
        except ValueError:
            # Narrowed from a bare except: json.loads raises ValueError on
            # malformed input.
            log.debug("The json does not parse, this is not what we expected")
            self.report['trans_http_proxy'] = True
            self.check_for_censorship(body)
            return

        # We base the determination of the presence of a transparent HTTP
        # proxy on the basis of the response containing the json that is to be
        # returned by a HTTP Request Test Helper
        if 'request_method' in content and \
                'request_uri' in content and \
                'request_headers' in content:
            log.debug("Found the keys I expected in %s" % content)
            self.report['trans_http_proxy'] = False
        else:
            log.debug("Did not find the keys I expected in %s" % content)
            self.report['trans_http_proxy'] = True

        self.check_for_censorship(body)
diff --git a/nettests/manipulation/traceroute.py b/nettests/manipulation/traceroute.py
new file mode 100644
index 0000000..f8311fd
--- /dev/null
+++ b/nettests/manipulation/traceroute.py
@@ -0,0 +1,137 @@
+# -*- encoding: utf-8 -*-
+#
+# :authors: Arturo Filastò
+# :licence: see LICENSE
+
+from twisted.python import usage
+from twisted.internet import defer
+
+from ooni.templates import scapyt
+
+from scapy.all import *
+
+from ooni.utils import log
+
class UsageOptions(usage.Options):
    """Command line options accepted by the multi protocol traceroute test."""
    optParameters = [
        ['backend', 'b', '8.8.8.8', 'Test backend to use'],
        ['timeout', 't', 5, 'The timeout for the traceroute test'],
        ['maxttl', 'm', 30, 'The maximum value of ttl to set on packets'],
        ['srcport', 'p', None, 'Set the source port to a specific value (only applies to TCP and UDP)'],
    ]
+
class TracerouteTest(scapyt.BaseScapyTest):
    """
    Runs TCP, UDP and ICMP traceroutes towards a backend host and records,
    for each TTL, the address of the hop that answered and the round trip
    time.
    """
    name = "Multi Protocol Traceroute Test"
    author = "Arturo Filastò"
    version = "0.1.1"

    usageOptions = UsageOptions
    # Destination ports probed by the TCP and UDP traceroutes
    # (ssh, telnet, dns, http, ntp, https).
    dst_ports = [22, 23, 53, 80, 123, 443]

    def setUp(self):
        def get_sport(protocol):
            # Return the user supplied source port, or a random unprivileged
            # one. NOTE(review): the protocol argument is currently unused;
            # the same policy applies to TCP and UDP.
            if self.localOptions['srcport']:
                return int(self.localOptions['srcport'])
            else:
                return random.randint(1024, 65535)

        self.get_sport = get_sport

    def max_ttl_and_timeout(self):
        """
        Read the maxttl and timeout options, record them in the report and
        return them as a (max_ttl, timeout) tuple of ints.
        """
        max_ttl = int(self.localOptions['maxttl'])
        timeout = int(self.localOptions['timeout'])
        self.report['max_ttl'] = max_ttl
        self.report['timeout'] = timeout
        return max_ttl, timeout

    def postProcessor(self, report):
        # XXX these values are fetched but never used: this is a placeholder
        # for a future cross-protocol comparison of the observed paths.
        tcp_hops = report['test_tcp_traceroute']
        udp_hops = report['test_udp_traceroute']
        icmp_hops = report['test_icmp_traceroute']

    def test_tcp_traceroute(self):
        """
        Does a traceroute to the destination by sending TCP SYN packets
        with TTLs from 1 until max_ttl.
        """
        def finished(packets, port):
            log.debug("Finished running TCP traceroute test on port %s" % port)
            answered, unanswered = packets
            self.report['hops_'+str(port)] = []
            for snd, rcv in answered:
                report = {'ttl': snd.ttl,
                        'address': rcv.src,
                        'rtt': rcv.time - snd.time,
                        # Fix: this previously read snd[UDP].sport, but these
                        # probes carry a TCP layer, not UDP, so scapy would
                        # raise IndexError here.
                        'sport': snd[TCP].sport
                }
                log.debug("%s: %s" % (port, report))
                self.report['hops_'+str(port)].append(report)

        dl = []
        max_ttl, timeout = self.max_ttl_and_timeout()
        for port in self.dst_ports:
            # flags=0x2 is a bare SYN; ttl=(1, max_ttl) makes scapy emit one
            # probe per TTL value.
            packets = IP(dst=self.localOptions['backend'],
                    ttl=(1,max_ttl),id=RandShort())/TCP(flags=0x2, dport=port,
                    sport=self.get_sport('tcp'))

            d = self.sr(packets, timeout=timeout)
            d.addCallback(finished, port)
            dl.append(d)
        return defer.DeferredList(dl)

    def test_udp_traceroute(self):
        """
        Does a traceroute to the destination by sending UDP packets with empty
        payloads with TTLs from 1 until max_ttl.
        """
        def finished(packets, port):
            log.debug("Finished running UDP traceroute test on port %s" % port)
            answered, unanswered = packets
            self.report['hops_'+str(port)] = []
            for snd, rcv in answered:
                report = {'ttl': snd.ttl,
                        'address': rcv.src,
                        'rtt': rcv.time - snd.time,
                        'sport': snd[UDP].sport
                }
                log.debug("%s: %s" % (port, report))
                self.report['hops_'+str(port)].append(report)
        dl = []
        max_ttl, timeout = self.max_ttl_and_timeout()
        for port in self.dst_ports:
            packets = IP(dst=self.localOptions['backend'],
                    ttl=(1,max_ttl),id=RandShort())/UDP(dport=port,
                    sport=self.get_sport('udp'))

            d = self.sr(packets, timeout=timeout)
            d.addCallback(finished, port)
            dl.append(d)
        return defer.DeferredList(dl)

    def test_icmp_traceroute(self):
        """
        Does a traceroute to the destination by sending ICMP echo request
        packets with TTLs from 1 until max_ttl.
        """
        def finished(packets):
            log.debug("Finished running ICMP traceroute test")
            answered, unanswered = packets
            self.report['hops'] = []
            for snd, rcv in answered:
                report = {'ttl': snd.ttl,
                        'address': rcv.src,
                        'rtt': rcv.time - snd.time
                }
                log.debug("%s" % (report))
                self.report['hops'].append(report)
        max_ttl, timeout = self.max_ttl_and_timeout()
        packets = IP(dst=self.localOptions['backend'],
                ttl=(1,max_ttl), id=RandShort())/ICMP()

        d = self.sr(packets, timeout=timeout)
        d.addCallback(finished)
        return d
+
diff --git a/nettests/scanning/http_url_list.py b/nettests/scanning/http_url_list.py
new file mode 100644
index 0000000..0accaae
--- /dev/null
+++ b/nettests/scanning/http_url_list.py
@@ -0,0 +1,98 @@
+# -*- encoding: utf-8 -*-
+#
+# :authors: Arturo Filastò
+# :licence: see LICENSE
+
+from twisted.internet import defer
+from twisted.python import usage
+from ooni.templates import httpt
+from ooni.utils import log
+
class UsageOptions(usage.Options):
    """Command line options accepted by the HTTP URL list test."""
    optParameters = [
        ['content', 'c', None,
            'The file to read from containing the content of a block page'],
        ['url', 'u', None, 'Specify a single URL to test.'],
    ]
+
class HTTPURLList(httpt.HTTPTest):
    """
    Performs GET, POST and PUT requests to a list of URLs specified as
    input and checks if the page that we get back as a result matches that
    of a block page given as input.

    If no block page is given as input to the test it will simply collect the
    responses to the HTTP requests and write them to a report file.
    """
    name = "HTTP URL List"
    author = "Arturo Filastò"
    version = "0.1.3"

    usageOptions = UsageOptions

    inputFile = ['file', 'f', None,
            'List of URLS to perform GET and POST requests to']

    def setUp(self):
        """
        Check for inputs: a per-input URL takes precedence over the --url
        option.
        """
        if self.input:
            self.url = self.input
        elif self.localOptions['url']:
            self.url = self.localOptions['url']
        else:
            raise Exception("No input specified")

    def check_for_content_censorship(self, body):
        """
        If we have specified what a censorship page looks like here we will
        check if the page we are looking at matches it.

        Sets self.report['censored'] to True only when every line of the
        block page matches the corresponding line of the response.

        XXX this is not tested, though it is basically what was used to detect
        censorship in the palestine case.
        """
        self.report['censored'] = True

        response_page = iter(body.split("\n"))

        # 'with' guarantees the block page file is closed; the previous
        # revision iterated via the deprecated xreadlines() and then called
        # close() on its result.
        with open(self.localOptions['content']) as censorship_page:
            # We first align the two pages to the first HTML tag (something
            # starting with <). This is useful so that we can give as input to
            # this test something that comes from the output of curl -kis
            # http://the_page/
            for line in censorship_page:
                if line.strip().startswith("<"):
                    break
            for line in response_page:
                if line.strip().startswith("<"):
                    break

            for censorship_line in censorship_page:
                try:
                    response_line = next(response_page)
                except StopIteration:
                    # The censored page and the response we got do not match
                    # in length.
                    self.report['censored'] = False
                    break
                censorship_line = censorship_line.replace("\n", "")
                if response_line != censorship_line:
                    self.report['censored'] = False
                    # No need to keep comparing once a mismatch is found.
                    break

    def processResponseBody(self, body):
        """Compare the response body against the block page, if one was given."""
        if self.localOptions['content']:
            log.msg("Checking for censorship in response body")
            self.check_for_content_censorship(body)

    def test_get(self):
        """Fetch the URL with an HTTP GET request."""
        return self.doRequest(self.url, method="GET")

    def test_post(self):
        """Fetch the URL with an HTTP POST request."""
        return self.doRequest(self.url, method="POST")

    def test_put(self):
        """Fetch the URL with an HTTP PUT request."""
        return self.doRequest(self.url, method="PUT")
+
+
diff --git a/ooni/runner.py b/ooni/runner.py
index a4b3472..9bdedfc 100644
--- a/ooni/runner.py
+++ b/ooni/runner.py
@@ -239,7 +239,8 @@ def runTestCasesWithInput(test_cases, test_input, yaml_reporter,
test_methods_d.addCallback(tests_done, test_cases[0][0])
return test_methods_d
-def runTestCasesWithInputUnit(test_cases, input_unit, yaml_reporter, oonib_reporter):
+def runTestCasesWithInputUnit(test_cases, input_unit, yaml_reporter,
+ oonib_reporter):
"""
Runs the Test Cases that are given as input parallely.
A Test Case is a subclass of ooni.nettest.NetTestCase and a list of
@@ -417,6 +418,8 @@ def runTestCases(test_cases, options, cmd_line_options):
log.err("Error in creating new report")
log.msg("We will only create reports to a file")
oonib_reporter = None
+ else:
+ oonib_reporter = None
yield yaml_reporter.createReport(options)
log.msg("Reporting to file %s" % config.reports.yamloo)
_______________________________________________
tor-commits mailing list
tor-commits@xxxxxxxxxxxxxxxxxxxx
https://lists.torproject.org/cgi-bin/mailman/listinfo/tor-commits