[or-cvs] [torflow/master 5/8] Replaced deprecated sets.Set with set
Author: John M. Schanck <john@xxxxxxxxxxx>
Date: Sun, 16 May 2010 00:18:18 -0400
Subject: Replaced deprecated sets.Set with set
Commit: eeb5f74e8584af4d2f2bf2a99aad6a775472eec7
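The sets module is deprecated (and gone in Python 3), so these scanners now use the builtin set type throughout; on interpreters older than 2.5 a guarded import aliases sets.Set to the same name, so the rest of the code reads identically either way. A minimal sketch of the pattern this patch applies (the collect_idhexes helper below is illustrative only, not part of the codebase):

  import sys

  # Fall back to sets.Set under the builtin's name on old interpreters,
  # so the rest of the module can use the plain "set" spelling.
  if sys.version_info < (2, 5):
    from sets import Set as set

  def collect_idhexes(nodes):
    # Illustrative helper: gather unique router fingerprints.
    seen = set([])
    for idhex in nodes:
      seen.add(idhex)
    return seen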
---
NetworkScanners/ExitAuthority/libsoat.py | 35 ++++++++++----------
NetworkScanners/ExitAuthority/soat.py | 47 +++++++++++++++-------------
NetworkScanners/ExitAuthority/soatstats.py | 6 ++--
3 files changed, 46 insertions(+), 42 deletions(-)
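One API note on the hunks below: the builtin set spells in-place union as update(), whereas sets.Set exposed it as union_update(), so SoupDiffer, HeaderDiffer, and SearchBasedTest switch to the new method name. A minimal sketch of the equivalence, with throwaway values:

  pool = set(["Server", "Date"])
  fresh = set(["Date", "Via"])

  # Builtin set: update() is the in-place union that sets.Set
  # called union_update().
  pool.update(fresh)
  assert pool == set(["Server", "Date", "Via"])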
diff --git a/NetworkScanners/ExitAuthority/libsoat.py b/NetworkScanners/ExitAuthority/libsoat.py
index f612cd2..848a4b6 100644
--- a/NetworkScanners/ExitAuthority/libsoat.py
+++ b/NetworkScanners/ExitAuthority/libsoat.py
@@ -14,12 +14,13 @@ import copy
import socket
import struct
+if sys.version_info < (2, 5):
+ from sets import Set as set
+
from OpenSSL import crypto
from soat import Tag, SoupStrainer
-import sets
-from sets import Set
from soat_config_real import *
sys.path.append("../../")
@@ -692,7 +693,7 @@ class DataHandler:
# These three bits are needed to fully recursively strain the parsed soup.
# For some reason, the SoupStrainer does not get applied recursively..
__first_strainer = SoupStrainer(lambda name, attrs: name in tags_to_check or
- len(Set(map(lambda a: a[0], attrs)).intersection(Set(attrs_to_check))) > 0)
+ len(set(map(lambda a: a[0], attrs)).intersection(set(attrs_to_check))) > 0)
def __tag_not_worthy(tag):
if tag.name in tags_to_check:
@@ -804,19 +805,19 @@ class SoupDiffer:
pass
def _get_tags(self, soup):
- return sets.Set(map(str,
+ return set(map(str,
[tag for tag in soup.findAll() if isinstance(tag, Tag)]))
def _get_attributes(self, soup):
attr_soup = [(tag.name, tag.attrs) for tag in soup.findAll()]
- attrs = sets.Set([])
+ attrs = set([])
for (tag, attr_list) in attr_soup:
for at in attr_list:
attrs.add((tag, at))
return attrs
def _get_content(self, soup):
- return sets.Set(map(str,
+ return set(map(str,
[tag for tag in soup.findAll() if not isinstance(tag, Tag)]))
def _update_changed_tag_map(self, tags_old, tags_new):
@@ -826,7 +827,7 @@ class SoupDiffer:
for tags in map(TheChosenSoup, changed_tags):
for t in tags.findAll():
if t.name not in changed_tags:
- self.changed_tag_map[t.name] = sets.Set([])
+ self.changed_tag_map[t.name] = set([])
for attr in t.attrs:
self.changed_tag_map[t.name].add(attr[0])
@@ -837,7 +838,7 @@ class SoupDiffer:
changed_attributes = list(attrs_new - attrs_old)
for (tag, attr) in changed_attributes:
if tag not in self.changed_attr_map:
- self.changed_attr_map[tag] = sets.Set([])
+ self.changed_attr_map[tag] = set([])
self.changed_attr_map[tag].add(attr[0])
def _update_changed_content(self, content_old, content_new):
@@ -853,9 +854,9 @@ class SoupDiffer:
self._update_changed_tag_map(self.tag_pool, tags)
self._update_changed_attr_map(self.attr_pool, attrs)
self._update_changed_content(self.content_pool, cntnt)
- self.tag_pool.union_update(tags)
- self.attr_pool.union_update(attrs)
- self.content_pool.union_update(cntnt)
+ self.tag_pool.update(tags)
+ self.attr_pool.update(attrs)
+ self.content_pool.update(cntnt)
def show_changed_tags(self, soup):
soup_tags = self._get_tags(soup)
@@ -894,8 +895,8 @@ class SoupDiffer:
class HeaderDiffer:
def __init__(self, orig_headers):
- self.header_pool = sets.Set(orig_headers)
- self.changed_headers = sets.Set([])
+ self.header_pool = set(orig_headers or [])
+ self.changed_headers = set([])
self._pickle_revision = 0
def filter_headers(headers):
@@ -906,22 +907,22 @@ class HeaderDiffer:
if re.match(i, h[0]):
matched = True
if not matched: ret.append(h)
- return sets.Set(ret)
+ return set(ret)
filter_headers = Callable(filter_headers)
def depickle_upgrade(self):
pass
def prune_differences(self, new_headers):
- new_headers = sets.Set(new_headers)
+ new_headers = set(new_headers or [])
changed = new_headers - self.header_pool
for i in changed:
self.changed_headers.add(i[0])
- self.header_pool.union_update(new_headers)
+ self.header_pool.update(new_headers)
def show_differences(self, new_headers):
ret = ""
- changed = sets.Set(new_headers) - self.header_pool
+ changed = set(new_headers or []) - self.header_pool
for i in changed:
if i[0] not in self.changed_headers:
ret += " "+i[0]+": "+i[1]+"\n"
diff --git a/NetworkScanners/ExitAuthority/soat.py b/NetworkScanners/ExitAuthority/soat.py
index d30b235..faa13af 100755
--- a/NetworkScanners/ExitAuthority/soat.py
+++ b/NetworkScanners/ExitAuthority/soat.py
@@ -48,6 +48,9 @@ import Queue
import threading
import atexit
+if sys.version_info < (2, 5):
+ from sets import Set as set
+
# Import the correct BeautifulSoup
try:
# Try system-wide BeautifulSoup
@@ -230,7 +233,7 @@ class ExitScanHandler(ScanSupport.ScanHandler):
# get the structure
routers = self.c.read_routers(self.c.get_network_status())
- bad_exits = Set([])
+ bad_exits = set([])
specific_bad_exits = [None]*len(ports_to_check)
for i in range(len(ports_to_check)):
specific_bad_exits[i] = []
@@ -360,10 +363,10 @@ class Test:
self.port = port
self.min_targets = min_targets
self.filename = None
- self.rescan_nodes = sets.Set([])
- self.nodes = sets.Set([])
+ self.rescan_nodes = set([])
+ self.nodes = set([])
self.node_map = {}
- self.banned_targets = sets.Set([])
+ self.banned_targets = set([])
self.total_nodes = 0
self.scan_nodes = 0
self.nodes_to_mark = 0
@@ -386,7 +389,7 @@ class Test:
for addr in self.successes.keys():
if type(self.successes[addr]) == int:
- self.successes[addr] = sets.Set(xrange(0,self.successes[addr]))
+ self.successes[addr] = set(xrange(0,self.successes[addr]))
plog("INFO", "Upgraded "+self.__class__.__name__+" to v1")
if self._pickle_revision < 2:
self._pickle_revision = 2
@@ -445,7 +448,7 @@ class Test:
self.results.remove(r)
def load_rescan(self, type, since=None):
- self.rescan_nodes = sets.Set([])
+ self.rescan_nodes = set([])
results = datahandler.getAll()
for r in results:
if r.status == type:
@@ -461,7 +464,7 @@ class Test:
def toggle_rescan(self):
if self.rescan_nodes:
plog("NOTICE", self.proto+" rescan complete. Switching back to normal scan")
- self.rescan_nodes = sets.Set([])
+ self.rescan_nodes = set([])
self.tests_per_node = num_tests_per_node
self.update_nodes()
else:
@@ -477,8 +480,8 @@ class Test:
for n in nodes:
self.node_map[n.idhex] = n
self.total_nodes = len(nodes)
- self.nodes = sets.Set(map(lambda n: n.idhex, nodes))
- marked_nodes = sets.Set(self.node_results.keys())
+ self.nodes = set(map(lambda n: n.idhex, nodes))
+ marked_nodes = set(self.node_results.keys())
self.nodes -= marked_nodes # Remove marked nodes
# Only scan the stuff loaded from the rescan
if self.rescan_nodes:
@@ -600,7 +603,7 @@ class Test:
if result.site in self.successes:
self.successes[result.site].add(result.exit_node)
else:
- self.successes[result.site]=sets.Set([result.exit_node])
+ self.successes[result.site]=set([result.exit_node])
win_cnt = len(self.successes[result.site])
@@ -614,7 +617,7 @@ class Test:
if result.site in self.connect_fails:
self.connect_fails[result.site].add(result.exit_node)
else:
- self.connect_fails[result.site] = sets.Set([result.exit_node])
+ self.connect_fails[result.site] = set([result.exit_node])
err_cnt = len(self.connect_fails[result.site])
@@ -692,7 +695,7 @@ class Test:
if result.site in self.exit_fails:
self.exit_fails[result.site].add(result.exit_node)
else:
- self.exit_fails[result.site] = sets.Set([result.exit_node])
+ self.exit_fails[result.site] = set([result.exit_node])
err_cnt = len(self.exit_fails[result.site])
@@ -706,7 +709,7 @@ class Test:
if result.site in self.dynamic_fails:
self.dynamic_fails[result.site].add(result.exit_node)
else:
- self.dynamic_fails[result.site] = sets.Set([result.exit_node])
+ self.dynamic_fails[result.site] = set([result.exit_node])
err_cnt = len(self.dynamic_fails[result.site])
@@ -755,9 +758,9 @@ class SearchBasedTest(Test):
'''
plog('INFO', 'Searching google for relevant sites...')
- urllist = Set([])
+ urllist = set([])
for filetype in filetypes:
- type_urls = Set([])
+ type_urls = set([])
while len(type_urls) < results_per_type:
query = random.choice(self.wordlist)
@@ -837,8 +840,8 @@ class SearchBasedTest(Test):
# make sure we don't get more urls than needed
if len(type_urls) > results_per_type:
- type_urls = Set(random.sample(type_urls, results_per_type))
- urllist.union_update(type_urls)
+ type_urls = set(random.sample(type_urls, results_per_type))
+ urllist.update(type_urls)
return list(urllist)
@@ -970,7 +973,7 @@ class HTTPTest(SearchBasedTest):
if result.site in self.httpcode_fails:
self.httpcode_fails[result.site].add(result.exit_node)
else:
- self.httpcode_fails[result.site] = sets.Set([result.exit_node])
+ self.httpcode_fails[result.site] = set([result.exit_node])
err_cnt = len(self.httpcode_fails[result.site])
@@ -1365,7 +1368,7 @@ class HTMLTest(HTTPTest):
address = random.choice(self.targets)
# Keep a trail log for this test and check for loops
- fetched = sets.Set([])
+ fetched = set([])
self.fetch_queue.append(("html", address, first_referer))
n_success = n_fail = n_inconclusive = 0
@@ -1476,7 +1479,7 @@ class HTMLTest(HTTPTest):
if not found_favicon:
targets.insert(0, ("image", urlparse.urljoin(orig_addr, "/favicon.ico")))
- loaded = sets.Set([])
+ loaded = set([])
for i in targets:
if i[1] in loaded:
@@ -1593,7 +1596,7 @@ class HTMLTest(HTTPTest):
# Also find recursive urls
recurse_elements = SoupStrainer(lambda name, attrs:
name in tags_to_recurse and
- len(Set(map(lambda a: a[0], attrs)).intersection(Set(attrs_to_recurse))) > 0)
+ len(set(map(lambda a: a[0], attrs)).intersection(set(attrs_to_recurse))) > 0)
self._add_recursive_targets(TheChosenSoup(tor_html.decode('ascii',
'ignore'), recurse_elements), address)
@@ -2457,7 +2460,7 @@ class DNSTest(Test):
return TEST_SUCCESS
exit_node = "$"+exit_node.idhex
- ips_d = Set([])
+ ips_d = set([])
try:
results = socket.getaddrinfo(address,None)
for result in results:
diff --git a/NetworkScanners/ExitAuthority/soatstats.py b/NetworkScanners/ExitAuthority/soatstats.py
index bcb58a0..db12c48 100755
--- a/NetworkScanners/ExitAuthority/soatstats.py
+++ b/NetworkScanners/ExitAuthority/soatstats.py
@@ -9,8 +9,8 @@ import pickle
import sys
import time
-import sets
-from sets import Set
+if sys.version_info < (2, 5):
+ from sets import Set as set
import libsoat
from libsoat import *
@@ -37,7 +37,7 @@ def main(argv):
reason_counts = {}
nodeResults = {}
- tests = Set([])
+ tests = set([])
total = len(data)
--
1.6.5