[or-cvs] r15516: SSL-part is reorganized and (almost) done. More communicatio (torflow/branches/gsoc2008)
Author: aleksei
Date: 2008-06-27 14:07:21 -0400 (Fri, 27 Jun 2008)
New Revision: 15516
Modified:
torflow/branches/gsoc2008/soat.py
torflow/branches/gsoc2008/soatstats.py
Log:
The SSL part is reorganized and (almost) done. More communication with the metatroller and the control port.
Modified: torflow/branches/gsoc2008/soat.py
===================================================================
--- torflow/branches/gsoc2008/soat.py 2008-06-27 17:52:54 UTC (rev 15515)
+++ torflow/branches/gsoc2008/soat.py 2008-06-27 18:07:21 UTC (rev 15516)
@@ -1,11 +1,12 @@
#!/usr/bin/python
+
import httplib
-import md5
import os
import random
import re
import pickle
from sets import Set
+import smtplib
import socket
import string
import sys
@@ -23,7 +24,7 @@
from TorCtl.TorCtl import Connection
sys.path.append("./tools/BeautifulSoup/")
-from BeautifulSoup import BeautifulSoup
+from BeautifulSoup import BeautifulSoup, SoupStrainer
sys.path.append("./tools/SocksiPy/")
import socks
@@ -51,16 +52,12 @@
same_origin_policy = True
#
-# data storage
-#
-
-
-#
# links of interest
#
docs_http = ['http://www.torproject.org','http://www.math.ut.ee','http://www.mozilla.com']
docs_https = ['mail.google.com','addons.mozilla.org','www.paypal.com','www.fastmail.fm']
+docs_ssh = []
#
# ports to test in the consistency test
@@ -74,6 +71,12 @@
["http", ExitPolicyRestriction('255.255.255.255', 80), "https", ExitPolicyRestriction('255.255.255.255', 443)]
]
+# tags and attributes to check in the http test
+
+tags_to_check = ['a', 'area', 'base', 'applet', 'embed', 'form', 'frame',
+ 'iframe', 'img', 'link', 'object', 'script']
+attrs_to_check = ['onclick', 'ondblclick', 'onmousedown', 'onmouseup', 'onmouseover',
+ 'onmousemove', 'onmouseout', 'onkeypress','onkeydown','onkeyup']
#
# constants
#
@@ -105,8 +108,8 @@
# The scanner class
class ExitNodeScanner:
- def __init__(self,meta_host,meta_port):
- # establish a connection
+ def __init__(self):
+ # establish a metatroller connection
plog('INFO', 'ExitNodeScanner starting up...')
try:
self.__client = Client(meta_host, meta_port)
@@ -134,7 +137,30 @@
reply = self.__client.readline()
if reply[:3] != '250': # first three chars indicate the reply code
reply += self.__client.readline()
+ plog('ERROR', 'Error configuring metatroller (' + command + ' failed)')
plog('ERROR', reply)
+ exit()
+
+ # establish a control port connection
+ try:
+ s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
+ s.connect((control_host, control_port))
+ c = Connection(s)
+ c.authenticate()
+ self.__control = c
+ except:
+ plog('ERROR', 'Couldn\'t connect to the control port')
+ exit()
+
+ # get a data handler
+ self.__datahandler = DataHandler()
+
+ # get the nodes tested so far
+ plog('INFO', 'Loading the previous run stats')
+ self.ssh_tested = Set([x.exit_node for x in self.__datahandler.getSsh()])
+ self.http_tested = Set([x.exit_node for x in self.__datahandler.getHttp()])
+ self.ssl_tested = Set([x.exit_node for x in self.__datahandler.getSsl()])
+
plog('INFO', 'ExitNodeScanner up and ready')
def get_exit_node(self):
@@ -152,23 +178,40 @@
plog('NOTICE','Current node: ' + self.__exit)
return self.__exit
+ def get_new_circuit(self):
+ plog('NOTICE', 'Trying to construct a new circuit')
+ self.__client.writeline("NEWEXIT")
+ reply = self.__client.readline()
+
+ if reply[:3] != '250':
+ plog('ERROR', 'Choosing a new exit failed')
+ plog('ERROR', reply)
+
+ def set_new_exit(self, exit):
+ plog('NOTICE', 'Trying to set ' + exit + ' as the exit for the next circuit')
+ self.__client.writeline("SETEXIT " + exit)
+ reply = self.__client.readline()
+
+ if reply[:3] != '250':
+ plog('ERROR', 'Setting ' + exit + ' as the new exit failed')
+ plog('ERROR', reply)
+
+ def report_bad_exit(self, exit):
+    # self.__control.set_option('AuthDirBadExit', exit) ?
+ pass
+
+  # get the list of nodes that allow exiting to a given port
+ def get_nodes_for_port(self, port):
+ routers = self.__control.read_routers(self.__control.get_network_status())
+ restriction = ExitPolicyRestriction('255.255.255.255', port)
+ return [x for x in routers if restriction.r_is_ok(x)]
+
# finds nodes that allow connections over a common protocol
# while disallowing connections over its secure version
def check_all_exits_port_consistency(self):
- s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
- try:
- s.connect((control_host, control_port))
- except:
- plog('ERROR', 'Couldn\'t connect to the control port (' + `control_port` + ' was used)')
- return 0
-
- c = Connection(s)
- c.authenticate()
- plog('INFO', 'Connection to control port established')
-
# get the structure
- routers = c.read_routers(c.get_network_status())
+ routers = self.__control.read_routers(self.__control.get_network_status())
bad_exits = Set([])
specific_bad_exits = [None]*len(ports_to_check)
for i in range(len(ports_to_check)):
@@ -191,69 +234,79 @@
plog('INFO', 'Total bad exits: ' + `len(bad_exits)` + ' (~' + `(len(bad_exits) * 100 / len(routers))` + '%)')
def check_http(self, address):
- request = urllib2.Request(address)
- request.add_header('User-Agent', user_agent)
-
- plog('INFO', 'Opening ' + address + ' using the direct connection')
- try:
- f = urllib2.urlopen(request)
- except Exception, e:
- plog('ERROR', 'Opening ' + address + ' directly failed')
- plog('ERROR', e)
- return 0
+ plog('INFO', 'Conducting an http test with destination ' + address)
- content = f.read()
- content = content.decode('ascii', 'ignore')
-
- direct_page = BeautifulSoup(content)
-
defaultsocket = socket.socket
socks.setdefaultproxy(socks.PROXY_TYPE_SOCKS5, tor_host, tor_port)
socket.socket = socks.socksocket
+ pcontent = self.http_request(address)
+
+ # reset the connection to direct
+ socket.socket = defaultsocket
+
exit_node = self.get_exit_node()
- plog('INFO', 'Opening ' + address + ' using exit node ' + exit_node)
+ if exit_node == 0 or exit_node == '0':
+    plog('INFO', 'We have no exit node to test, skipping to the next test.')
+ return 0
+
+ # if we have no content, we had a connection error
+ # address[7:] means we're leaving out the 'http://' from the address part
+ if pcontent == 0:
+ result = HttpTestResult(exit_node, address, 0, TEST_INCONCLUSIVE)
+ result_file = open(http_i_dir + `exit_node` + '_' + address[7:] + '.result','w')
+ pickle.dump(result, result_file)
+ result_file.close()
+ return TEST_INCONCLUSIVE
+
+ elements = SoupStrainer(lambda name, attrs : name in tags_to_check or
+ len(Set(attrs).intersection(Set(attrs_to_check))) > 0)
+ pcontent = pcontent.decode('ascii', 'ignore')
+ psoup = BeautifulSoup(pcontent, parseOnlyThese=elements)
+
+ # load the original tag structure
+ # if we don't have any yet, get it
+ soup = 0
try:
- g = urllib2.urlopen(request)
+ tag_file = open(http_tags_dir + address[7:] + '.tags', 'r')
+ soup = BeautifulSoup(tag_file.read())
+ tag_file.close()
+ except IOError:
+ content = self.http_request(address)
+ content = content.decode('ascii','ignore')
+ soup = BeautifulSoup(content, parseOnlyThese=elements)
+    tag_file = open(http_tags_dir + address[7:] + '.tags', 'w')
+ tag_file.write(soup.__str__())
+ tag_file.close()
except Exception, e:
- plog('ERROR', 'Opening ' + address + ' via tor failed')
+ plog('ERROR', 'Failed to get the correct tag structure for ' + address)
plog('ERROR', e)
- socket.socket = defaultsocket
- return 0
+ return TEST_INCONCLUSIVE
+ if soup == 0:
+ plog('ERROR', 'Failed to get the correct tag structure for ' + address)
+ return TEST_INCONCLUSIVE
- pcontent = g.read()
- pcontent = pcontent.decode('ascii', 'ignore')
+ # compare the content
+ # if content matches, everything is ok
+ if psoup == soup:
+ plog('INFO', 'Content matches')
- node_page = BeautifulSoup(pcontent)
+  # if the content doesn't match, re-fetch the direct content
+
+  # compare the new and old direct content
+  # if they still match, it means the node has been changing the content
- # reset the default connection
- socket.socket = defaultsocket
- # nasty tags: a, applet, div, embed, form, frame, iframe, img, link, script
- # also check DOM event stuff
-
- # address[7:] means we're leaving out the 'http://' from the address part
- #
- # just some temporary stuff (compare the whole content) that will be replaced soon
- #
- if md5.new(pcontent).digest() != md5.new(content).digest():
- plog('ERROR', 'The retrieved pages dont match')
-
- content_file_handle = open(http_tags_dir + address[7:] + '.result','w')
- content_file_handle.write(content)
- content_file_handle.close()
+ # if content has changed outside of tor, update the saved file
- pcontent_file_handle = open(http_tags_dir + `exit_node` + '_' + address[7:] + '.result','w')
- pcontent_file_handle.write(pcontent)
- pcontent_file_handle.close()
+ # compare the node content and the new content
+ # if it matches, everything is ok
- else:
- result = HttpTestResult(exit_node, address, 0, TEST_SUCCESS)
- result_file = open(http_s_dir + `exit_node` + '_' + address[7:] + '.result','w')
- pickle.dump(result, result_file)
- return 0
+  # if it doesn't match, it means the node has been changing the content
+ return TEST_SUCCESS
+
def check_openssh(self, address):
ssh = pyssh.Ssh('username', 'host', 22)
ssh.set_sshpath(pyssh.SSH_PATH)
@@ -264,111 +317,205 @@
return 0
def check_openssl(self, address):
+ plog('INFO', 'Conducting an ssl test with destination ' + address)
- # specify the context
- ctx = SSL.Context(SSL.SSLv23_METHOD)
- ctx.set_verify_depth(1)
+ # get the cert via tor
- # ready the certificate request
- request = crypto.X509Req()
+ defaultsocket = socket.socket
+ socks.setdefaultproxy(socks.PROXY_TYPE_SOCKS5, tor_host, tor_port)
+ socket.socket = socks.socksocket
- # open a direct ssl connection
- s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
- c = SSL.Connection(ctx, s)
- c.set_connect_state()
+ cert = self.ssl_request(address)
- plog('INFO', 'Opening a direct ssl connection to ' + address)
-
+ # reset the connection method back to direct
+ socket.socket = defaultsocket
+
+ exit_node = self.get_exit_node()
+ if exit_node == 0 or exit_node == '0':
+    plog('INFO', 'We have no exit node to test, skipping to the next test.')
+ return TEST_FAILURE
+
+ # if we got no cert, there was an ssl error
+ if cert == 0:
+ result = OpenSSLTestResult(exit_node, address, 0, TEST_INCONCLUSIVE)
+ result_file = open(ssl_i_dir + `exit_node` + '_' + address + '.result','w')
+ pickle.dump(result, result_file)
+ result_file.close()
+ self.ssl_tested.add(exit_node)
+ return TEST_INCONCLUSIVE
+
+ # load the original cert and compare
+ # if we don't have the original cert yet, get it
+ original_cert = 0
try:
- c.connect((address, 443))
- c.send(crypto.dump_certificate_request(crypto.FILETYPE_ASN1,request))
- except socket.error, e:
- plog('ERROR','An error occured while opening a direct ssl connection to ' + address)
- plog('ERROR', e)
- return 0
+ cert_file = open(ssl_certs_dir + address + '.pem', 'r')
+ cert_string = cert_file.read()
+ original_cert = crypto.load_certificate(crypto.FILETYPE_PEM, cert_string)
+ except IOError:
+ plog('INFO', 'Opening a direct ssl connection to ' + address)
+ original_cert = self.ssl_request(address)
+ if original_cert.has_expired():
+      plog('ERROR', 'The ssl cert for ' + address + ' seems to have expired. Skipping to the next test...')
+ return TEST_INCONCLUSIVE
+ cert_file = open(ssl_certs_dir + address + '.pem', 'w')
+ cert_file.write(crypto.dump_certificate(crypto.FILETYPE_PEM, original_cert))
+ cert_file.close()
+ except:
+    plog('ERROR', 'Error occurred while acquiring the correct cert (' + ssl_certs_dir + address + '.pem)')
+ return TEST_INCONCLUSIVE
+ if original_cert == 0:
+ plog('ERROR', 'Error getting the correct cert for ' + address)
+ return TEST_INCONCLUSIVE
- cert = c.get_peer_certificate()
+ # get an easily comparable representation of the certs
cert_pem = crypto.dump_certificate(crypto.FILETYPE_PEM, cert)
+ original_cert_pem = crypto.dump_certificate(crypto.FILETYPE_PEM, original_cert)
- # save the cert
+  # in any case, we can consider this node tested
+ self.ssl_tested.add(exit_node)
- cert_file_handle = open(ssl_certs_dir + address + '.pem', 'w')
- cert_file_handle.write(cert_pem)
- cert_file_handle.close()
+ # if certs match, everything is ok
+ if cert_pem == original_cert_pem:
+ cert_file = ssl_certs_dir + address + '.pem'
+ result = OpenSSLTestResult(exit_node, address, cert_file, TEST_SUCCESS)
+ result_file = open(ssl_s_dir + `exit_node` + '_' + address + '.result','w')
+ pickle.dump(result, result_file)
+ result_file.close()
+ return TEST_SUCCESS
+
+ # if certs dont match, open up a direct connection and update the cert
+ plog('INFO', 'Opening a direct ssl connection to ' + address)
+ original_cert_new = self.ssl_request(address)
+ if original_cert_new == 0:
+ plog('ERROR', 'Error getting the correct cert for ' + address)
+ result = OpenSSLTestResult(exit_node, address, 0, TEST_INCONCLUSIVE)
+ result_file = open(ssl_i_dir + `exit_node` + '_' + address + '.result','w')
+ pickle.dump(result, result_file)
+ result_file.close()
+ return TEST_INCONCLUSIVE
- # if the original certificate was invalid, stop here
- if cert.has_expired():
- plog('INFO', 'SSL certificate of the ' + address + ' server has expired. Skipping to the next test')
- return 0
+ original_cert_new_pem = crypto.dump_certificate(crypto.FILETYPE_PEM, original_cert_new)
- # check whether we already have a circuit.
- # if yes, open a connection via tor, otherwise skip to the next test
- exit_node = self.get_exit_node()
- if exit_node == 0 or exit_node == '0':
- plog('INFO', 'We have no exit node to test, skipping to the next test.')
- return 0
+ # compare the old and new cert
+  # if the old and new direct certs match, the exit node has been messing with the cert
+ if original_cert_pem == original_cert_new_pem:
+ plog('ERROR', 'Exit node ' + `exit_node` + ' seems to be meddling with certificates. (' + address + ')')
- defaultsocket = socket.socket
- socks.setdefaultproxy(socks.PROXY_TYPE_SOCKS5, tor_host, tor_port)
- socket.socket = socks.socksocket
+ cert_file_name = ssl_certs_dir + address + '_' + `exit_node` + '.pem'
+    cert_file = open(cert_file_name, 'w')
+ cert_file.write(cert_pem)
+ cert_file.close()
- s2 = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
- c2 = SSL.Connection(ctx, s2)
- c2.set_connect_state()
-
- plog('INFO', 'Opening an ssl connection to ' + address + ' using exit node ' + `exit_node`)
+ result = OpenSSLTestResult(exit_node, address, cert_file_name, TEST_FAILURE)
+ result_file = open(ssl_f_dir + `exit_node` + '_' + address + '.result','w')
+ pickle.dump(result, result_file)
+ result_file.close()
+ return TEST_FAILURE
+  # if the comparison fails (the site's cert has changed), replace the old saved cert with the new one
+ cert_file = open(ssl_certs_dir + address + '.pem', 'w')
+ cert_file.write(original_cert_new_pem)
+ cert_file.close()
+
+ # compare the new cert and the node cert
+ # if certs match, everything is ok
+ if cert_pem == original_cert_new_pem:
+ cert_file = ssl_certs_dir + address + '.pem'
+ result = OpenSSLTestResult(exit_node, address, cert_file, TEST_SUCCESS)
+ result_file = open(ssl_s_dir + `exit_node` + '_' + address + '.result','w')
+ pickle.dump(result, result_file)
+ result_file.close()
+ return TEST_SUCCESS
+
+  # if the certs don't match, it means the exit node has been messing with the cert
+ plog('ERROR', 'Exit node ' + `exit_node` + ' seems to be meddling with certificates. (' + address + ')')
+
+ cert_file_name = ssl_certs_dir + address + '_' + `exit_node` + '.pem'
+  cert_file = open(cert_file_name, 'w')
+ cert_file.write(cert_pem)
+ cert_file.close()
+
+ result = OpenSSLTestResult(exit_node, address, cert_file_name, TEST_FAILURE)
+ result_file = open(ssl_f_dir + `exit_node` + '_' + address + '.result','w')
+ pickle.dump(result, result_file)
+ result_file.close()
+
+ return TEST_FAILURE
+
+ # stub for checking whether smtp & tls function properly
+ def check_smtp(self, address):
try:
- c2.connect((address, 443))
+ s = smtplib.SMTP(address)
+ c = s.ehlo()[0]
+      if not (200 <= c <= 299):
+        return 0
+      if not s.has_extn('starttls'):
+        return 0
+      s.starttls()
+      c = s.ehlo()[0]
+      if not (200 <= c <= 299):
+        return 0
+ except:
+ pass
+
+ # stub for checking whether pop & tls function properly
+ def check_pop(self, address):
+ pass
+
+ # stub for checking whether imap & tls function properly
+ def check_imap(self, address):
+ pass
+
+ def http_request(self, address):
+
+ request = 0
+ try:
+ request = urllib2.Request(address)
+ request.add_header('User-Agent', user_agent)
except Exception, e:
- plog('ERROR','An error occured while opening a direct ssl connection to ' + address)
+    plog('ERROR', 'Forming an http request to ' + address + ' failed.')
plog('ERROR', e)
return 0
-
+
+ content = 0
try:
- c2.send(crypto.dump_certificate_request(crypto.FILETYPE_ASN1,request))
+ reply = urllib2.urlopen(request)
+ content = reply.read()
except Exception, e:
- plog('ERROR', 'Got the following ssl error while connecting via tor:')
+    plog('ERROR', 'Executing an http request to ' + address + ' failed')
plog('ERROR', e)
- cert_file = ssl_certs_dir + address + '.pem'
- result = OpenSSLTestResult(exit_node, address, cert_file, TEST_INCONCLUSIVE)
- result_file = open(ssl_i_dir + `exit_node` + '_' + address + '.result','w')
- pickle.dump(result, result_file)
return 0
- cert2 = c2.get_peer_certificate()
- cert2_pem = crypto.dump_certificate(crypto.FILETYPE_PEM, cert2)
+ return content
- # compare the received cert to the original
- #
- # if certs match, save the result of the test only. no need to keep the same cert in two files
- #
- # if certs are different, file the test result under positive cases
- # save the received cert for inspection
+ def ssh_request(self):
+ pass
- if cert_pem == cert2_pem:
- cert_file = ssl_certs_dir + address + '.pem'
- result = OpenSSLTestResult(exit_node, address, cert_file, TEST_SUCCESS)
- result_file = open(ssl_s_dir + `exit_node` + '_' + address + '.result','w')
- pickle.dump(result, result_file)
- else:
- plog('ERROR', 'Exit node ' + `exit_node` + ' seems to be meddling with certificates. (' + address + ')')
+ def ssl_request(self, address):
- cert_file = ssl_certs_dir + address + '_' + `exit_node` + '.pem'
- cert_file_handle = open(cert_file, 'w')
- cert_file_handle.write(cert2_pem)
- cert_file_handle.close()
+ # specify the context
+ ctx = SSL.Context(SSL.SSLv23_METHOD)
+ ctx.set_verify_depth(1)
- result = OpenSSLTestResult(exit_node, address, cert_file, TEST_FAILURE)
- result_file = open(ssl_f_dir + `exit_node` + '_' + address + '.result','w')
- pickle.dump(result, result_file)
+ # ready the certificate request
+ request = crypto.X509Req()
- plog('INFO', 'Test complete. Moving on...')
- # reset the default connection
- socket.socket = defaultsocket
+ # open an ssl connection
+ s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
+ c = SSL.Connection(ctx, s)
+ c.set_connect_state()
- return 0
+ try:
+ c.connect((address, 443))
+ c.send(crypto.dump_certificate_request(crypto.FILETYPE_PEM,request))
+ except Exception, e:
+    plog('ERROR', 'An error occurred while opening an ssl connection to ' + address)
+ plog('ERROR', e)
+ return 0
+
+ # return the cert
+ return c.get_peer_certificate()
+
# some helpful methods
'''
@@ -423,7 +570,6 @@
urls = []
content = response.read()
- content = content.decode('ascii', 'ignore')
soup = BeautifulSoup(content)
@@ -446,28 +592,64 @@
# main logic
#
def main(argv):
- scanner = ExitNodeScanner(meta_host, meta_port)
+ scanner = ExitNodeScanner()
# consistency test
# scanner.check_all_exits_port_consistency()
- while 1:
-
# find sites for http testing if necessary
#
# global doc_urls
# doc_urls.extend(load_url_list())
# doc_urls = list(Set(doc_urls))
# plog('NOTICE', 'Final URL list: ' + '\n'.join(doc_urls) + '\n')
+
+ # get the number of nodes that need to be tested
+ ssl_nodes = len(scanner.get_nodes_for_port(443))
+ http_nodes = len(scanner.get_nodes_for_port(80))
+ ssh_nodes = len(scanner.get_nodes_for_port(22))
+
+ ssl_done = False
+ http_done = False
+ ssh_done = True
+ while 1:
- # https test
- for ssl_site in docs_https:
- scanner.check_openssl(ssl_site)
-
- # http test
- for http_site in docs_http:
- scanner.check_http(http_site)
+ # https test
+
+ if not ssl_done:
+ for ssl_site in docs_https:
+ scanner.check_openssl(ssl_site)
+ ssl_tested_n = len(scanner.ssl_tested)
+ plog('INFO', 'Nodes ssl-tested: ' + `ssl_tested_n` + '/' + `ssl_nodes`
+ + ' (~' + `((ssl_tested_n * 100) / ssl_nodes)` + '%)')
+ if ssl_tested_n == ssl_nodes:
+ ssl_done = True
+
+ # http test
+
+ if not http_done:
+ for http_site in docs_http:
+ scanner.check_http(http_site)
+ http_tested_n = len(scanner.http_tested)
+ plog('INFO', 'Nodes http-tested: ' + `http_tested_n` + '/' + `http_nodes`
+ + ' (~' + `((http_tested_n * 100) / http_nodes)` + '%)')
+ if http_tested_n == http_nodes:
+ http_done = True
+ # ssh test
+ '''
+ if not ssh_done:
+ pass
+ '''
+
+ # check whether we're done, otherwise get a new circuit
+ if ssl_done and http_done and ssh_done:
+ plog('INFO','Wow! We have tested the whole tor network. Check soatstats.py for results')
+ break
+ else:
+ scanner.get_new_circuit()
+ time.sleep(1)
+
#
# initiate the program
#
Modified: torflow/branches/gsoc2008/soatstats.py
===================================================================
--- torflow/branches/gsoc2008/soatstats.py 2008-06-27 17:52:54 UTC (rev 15515)
+++ torflow/branches/gsoc2008/soatstats.py 2008-06-27 18:07:21 UTC (rev 15516)
@@ -19,15 +19,18 @@
ssl_s_dir = data_dir + 'ssl/successful/'
ssl_f_dir = data_dir + 'ssl/failed/'
ssl_i_dir = data_dir + 'ssl/inconclusive/'
+ssl_result_dirs = [ssl_s_dir, ssl_f_dir, ssl_i_dir]
http_tags_dir = data_dir + 'http/tags/'
http_s_dir = data_dir + 'http/successful/'
http_f_dir = data_dir + 'http/failed/'
http_i_dir = data_dir + 'http/inconclusive/'
+http_result_dirs = [http_s_dir, http_f_dir, http_i_dir]
ssh_s_dir = data_dir + 'ssh/successful/'
ssh_f_dir = data_dir + 'ssh/failed/'
ssh_i_dir = data_dir + 'ssh/inconclusive/'
+ssh_result_dirs = [ssh_s_dir, ssh_f_dir, ssh_i_dir]
result_dirs = [ssl_s_dir, ssl_f_dir, ssl_i_dir,
http_s_dir, http_f_dir, http_i_dir]
@@ -91,12 +94,30 @@
filtered = list(results)
return filtered
+
+ # filter by node
+ def filterByNode(self, results, id):
+ return filter(lambda x: x.exit_node == id, results)
# get all available results
def getAll(self):
+ return self.__getResults(result_dirs)
+
+ # get results per protocol
+ def getSsh(self):
+ return self.__getResults(ssh_result_dirs)
+
+ def getHttp(self):
+ return self.__getResults(http_result_dirs)
+
+ def getSsl(self):
+ return self.__getResults(ssl_result_dirs)
+
+ # gather results from given directories
+ def __getResults(self, dirs):
results = []
- for dir in result_dirs:
+ for dir in dirs:
files = os.listdir(dir)
for file in files:
if file[-6:] == 'result':
@@ -106,10 +127,6 @@
return results
- # filter by node
- def filterByNode(self, results, id):
- return filter(lambda x: x.exit_node == id, results)
-
#
# Displaying stats on the console
#
@@ -135,7 +152,11 @@
def Summary(self):
dh = DataHandler()
data = dh.getAll()
+
nodeSet = Set([])
+ sshSet = Set([])
+ sslSet = Set([])
+ httpSet = Set([])
total = len(data)
good = 0
@@ -156,10 +177,13 @@
bad += 1
if result.__class__.__name__ == 'OpenSSHTestResult':
+ sshSet.add(result.exit_node)
ssh += 1
elif result.__class__.__name__ == 'HttpTestResult':
+ httpSet.add(result.exit_node)
http += 1
elif result.__class__.__name__ == 'OpenSSLTestResult':
+ sslSet.add(result.exit_node)
ssl += 1
swidth = 25
@@ -175,6 +199,9 @@
print format % (swidth, 'Tests completed', nwidth, total)
print format % (swidth, 'Nodes tested', nwidth, len(nodeSet))
+ print format % (swidth, 'Nodes SSL-tested', nwidth, len(sslSet))
+ print format % (swidth, 'Nodes HTTP-tested', nwidth, len(httpSet))
+ print format % (swidth, 'Nodes SSH-tested', nwidth, len(sshSet))
print format % (swidth, 'Failed tests', nwidth, bad)
print format % (swidth, 'Succeeded tests', nwidth, good)
print format % (swidth, 'Inconclusive tests', nwidth, inconclusive)