
[tor-commits] [onionperf/master] Port OnionPerf to Python 3.



commit e5f15ea4328e1e0588e249314d002d7dbb647d07
Author: Karsten Loesing <karsten.loesing@xxxxxxx>
Date:   Tue May 5 16:21:43 2020 +0200

    Port OnionPerf to Python 3.
    
    Fixes #29367.
---
 onionperf/analysis.py                |  8 +++---
 onionperf/docs/conf.py               | 20 +++++++--------
 onionperf/measurement.py             | 20 ++++++++-------
 onionperf/model.py                   | 10 +++-----
 onionperf/onionperf                  |  4 +--
 onionperf/tests/test_reprocessing.py |  4 +--
 onionperf/tests/test_utils.py        |  8 +++---
 onionperf/util.py                    | 47 +++++++++---------------------------
 onionperf/visualization.py           |  8 +++---
 run_tests.sh                         |  2 +-
 setup.py                             |  2 +-
 11 files changed, 52 insertions(+), 81 deletions(-)

diff --git a/onionperf/analysis.py b/onionperf/analysis.py
index e90d005..82db3c8 100644
--- a/onionperf/analysis.py
+++ b/onionperf/analysis.py
@@ -17,7 +17,7 @@ from stem.response.events import CircuitEvent, CircMinorEvent, StreamEvent, Band
 from stem.response import ControlMessage, convert
 
 # onionperf imports
-import util
+from . import util
 
 ERRORS = {  'AUTH' : 'TGEN/AUTH',
             'READ' : 'TGEN/READ',
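Python 3 removed implicit relative imports (PEP 328), so a bare "import util" no longer resolves to the sibling module inside the onionperf package. A minimal sketch of the two explicit spellings, assuming the package layout shown in the diffstat above:

    # inside onionperf/analysis.py
    from . import util            # explicit relative import
    # from onionperf import util  # equivalent absolute import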
@@ -286,7 +286,7 @@ class Analysis(object):
                         output.write("@type torperf 1.1\r\n")
                         output_str = ' '.join("{0}={1}".format(k, d[k]) for k in sorted(d.keys()) if d[k] is not None).strip()
                         output.write("{0}\r\n".format(output_str))
-                    except KeyError, e:
+                    except KeyError as e:
                         logging.warning("KeyError while exporting torperf file, missing key '{0}', skipping transfer '{1}'".format(str(e), xfer_db['transfer_id']))
                         continue
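The comma form "except KeyError, e" is a SyntaxError on Python 3; "as" is the only accepted binding syntax. A minimal standalone sketch:

    try:
        {}['transfer_id']
    except KeyError as e:   # Python 2's "except KeyError, e" no longer parses
        print("missing key: {0}".format(e))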
 
@@ -431,8 +431,7 @@ class Transfer(object):
             d['elapsed_seconds']['payload_progress'] = {decile: self.payload_progress[decile] - e.unix_ts_start for decile in self.payload_progress if self.payload_progress[decile] is not None}
         return d
 
-class Parser(object):
-    __metaclass__ = ABCMeta
+class Parser(object, metaclass=ABCMeta):
     @abstractmethod
     def parse(self, source, do_simple):
         pass
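Python 3 silently ignores a class-body __metaclass__ attribute, so the old declaration would have left @abstractmethod unenforced; the metaclass now belongs in the class header. A minimal sketch:

    from abc import ABCMeta, abstractmethod

    class Parser(object, metaclass=ABCMeta):
        @abstractmethod
        def parse(self, source, do_simple):
            pass

    # Instantiating Parser() directly now raises TypeError until a
    # subclass overrides parse().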
@@ -837,7 +836,6 @@ class TorCtlParser(Parser):
             except:
                 continue
         source.close()
-        print len(self.streams), len(self.circuits)
 
     def get_data(self):
         return {'circuits': self.circuits, 'circuits_summary': self.circuits_summary,
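The deleted line was a Python 2 print statement used for debugging; the commit drops it rather than porting it. Had it been kept, the Python 3 form would be a function call:

    print(len(self.streams), len(self.circuits))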
diff --git a/onionperf/docs/conf.py b/onionperf/docs/conf.py
index f3d2b34..6344c90 100644
--- a/onionperf/docs/conf.py
+++ b/onionperf/docs/conf.py
@@ -19,14 +19,14 @@ sys.path.insert(0, os.path.abspath('..'))
 
 # -- Project information -----------------------------------------------------
 
-project = u'onionperf'
-copyright = u'2019, Ana Custura'
-author = u'Ana Custura'
+project = 'onionperf'
+copyright = '2019, Ana Custura'
+author = 'Ana Custura'
 
 # The short X.Y version
-version = u''
+version = ''
 # The full version, including alpha/beta/rc tags
-release = u''
+release = ''
 
 
 # -- General configuration ---------------------------------------------------
@@ -66,7 +66,7 @@ language = None
 # List of patterns, relative to source directory, that match files and
 # directories to ignore when looking for source files.
 # This pattern also affects html_static_path and html_extra_path .
-exclude_patterns = [u'_build', 'Thumbs.db', '.DS_Store']
+exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']
 
 # The name of the Pygments (syntax highlighting) style to use.
 pygments_style = 'sphinx'
@@ -131,8 +131,8 @@ latex_elements = {
 # (source start file, target name, title,
 #  author, documentclass [howto, manual, or own class]).
 latex_documents = [
-    (master_doc, 'onionperf.tex', u'onionperf Documentation',
-     u'Ana Custura', 'manual'),
+    (master_doc, 'onionperf.tex', 'onionperf Documentation',
+     'Ana Custura', 'manual'),
 ]
 
 
@@ -141,7 +141,7 @@ latex_documents = [
 # One entry per manual page. List of tuples
 # (source start file, name, description, authors, manual section).
 man_pages = [
-    (master_doc, 'onionperf', u'onionperf Documentation',
+    (master_doc, 'onionperf', 'onionperf Documentation',
      [author], 1)
 ]
 
@@ -152,7 +152,7 @@ man_pages = [
 # (source start file, target name, title, author,
 #  dir menu entry, description, category)
 texinfo_documents = [
-    (master_doc, 'onionperf', u'onionperf Documentation',
+    (master_doc, 'onionperf', 'onionperf Documentation',
      author, 'onionperf', 'One line description of project.',
      'Miscellaneous'),
 ]
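All of the conf.py changes are the same cleanup: every Python 3 string literal is already unicode, so the u'' prefixes are redundant (Python 3.3+ still accepts them, but removing them keeps the file uniform):

    project = 'onionperf'   # identical to u'onionperf' on Python 3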
diff --git a/onionperf/measurement.py b/onionperf/measurement.py
index 467ff61..91952a5 100644
--- a/onionperf/measurement.py
+++ b/onionperf/measurement.py
@@ -4,7 +4,7 @@
   See LICENSE for licensing information
 '''
 
-import os, traceback, subprocess, threading, Queue, logging, time, datetime, re, shlex
+import os, traceback, subprocess, threading, queue, logging, time, datetime, re, shlex
 from lxml import etree
 
 # stem imports
@@ -14,7 +14,7 @@ from stem.version import Version, Requirement, get_system_tor_version
 from stem import __version__ as stem_version
 
 # onionperf imports
-import analysis, monitor, model, util
+from . import analysis, monitor, model, util
 
 def generate_docroot_index(docroot_path):
     root = etree.Element("files")
@@ -22,7 +22,7 @@ def generate_docroot_index(docroot_path):
     for filename in filepaths:
         e = etree.SubElement(root, "file")
         e.set("name", filename)
-    with open("{0}/index.xml".format(docroot_path), 'wb') as f: print >> f, etree.tostring(root, pretty_print=True, xml_declaration=True)
+    with open("{0}/index.xml".format(docroot_path), 'wt') as f: print(etree.tostring(root, pretty_print=True, xml_declaration=True), file=f)
 
 def readline_thread_task(instream, q):
     # wait for lines from stdout until the EOF
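One wrinkle in the new index-writing line: lxml's etree.tostring() returns bytes when an XML declaration is requested, and print()ing a bytes object into a text-mode file records its b'...' repr rather than the XML itself. A safer sketch, reusing root and docroot_path from the function above, decodes first:

    with open("{0}/index.xml".format(docroot_path), 'wt') as f:
        xml_bytes = etree.tostring(root, pretty_print=True, xml_declaration=True)
        f.write(xml_bytes.decode('utf-8'))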
@@ -49,7 +49,8 @@ def watchdog_thread_task(cmd, cwd, writable, done_ev, send_stdin, ready_search_s
         # wait for a string to appear in stdout if requested
         if ready_search_str is not None:
             boot_re = re.compile(ready_search_str)
-            for line in iter(subp.stdout.readline, b''):
+            for bytes in iter(subp.stdout.readline, b''):
+                line = bytes.decode('utf-8')
                 writable.write(line)
                 if boot_re.search(line):
                     break  # got it!
@@ -59,7 +60,7 @@ def watchdog_thread_task(cmd, cwd, writable, done_ev, send_stdin, ready_search_s
             ready_ev.set()
 
         # a helper will block on stdout and return lines back to us in a queue
-        stdout_q = Queue.Queue()
+        stdout_q = queue.Queue()
         t = threading.Thread(target=readline_thread_task, args=(subp.stdout, stdout_q))
         t.start()
 
@@ -67,9 +68,9 @@ def watchdog_thread_task(cmd, cwd, writable, done_ev, send_stdin, ready_search_s
         # sure that the subprocess is still alive and the master doesn't want us to quit
         while subp.poll() is None and done_ev.is_set() is False:
             try:
-                line = stdout_q.get(True, 1)
-                writable.write(line)
-            except Queue.Empty:
+                bytes = stdout_q.get(True, 1)
+                writable.write(bytes.decode('utf-8'))
+            except queue.Empty:
                 # the queue is empty and the get() timed out, recheck loop conditions
                 continue
 
@@ -100,7 +101,8 @@ def watchdog_thread_task(cmd, cwd, writable, done_ev, send_stdin, ready_search_s
 
         # helper thread is done, make sure we drain the remaining lines from the stdout queue
         while not stdout_q.empty():
-            writable.write(stdout_q.get_nowait())
+            bytes = stdout_q.get_nowait()
+            writable.write(bytes.decode('utf-8'))
         # if we have too many failures, exit the watchdog to propagate the error up
         if len(failure_times) > 10:
             break
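Two Python 3 changes run through the watchdog: the Queue module is now called queue (with queue.Empty for the timeout case), and subprocess pipes deliver bytes that must be decoded before being written to a text sink. A minimal sketch of the pattern; note that the loop variable named "bytes" in the patch shadows the builtin of the same name, so a name like "raw" is safer:

    import queue

    q = queue.Queue()
    q.put(b"line from the subprocess\n")
    try:
        raw = q.get(True, 1)        # block for at most one second
        text = raw.decode('utf-8')  # bytes -> str before writing to a text file
    except queue.Empty:             # was Queue.Empty on Python 2
        text = None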
diff --git a/onionperf/model.py b/onionperf/model.py
index 3c057c5..a5e0787 100644
--- a/onionperf/model.py
+++ b/onionperf/model.py
@@ -5,16 +5,14 @@
 '''
 
 from abc import ABCMeta, abstractmethod
-from cStringIO import StringIO
+from io import StringIO
 from networkx import read_graphml, write_graphml, DiGraph
 
-class TGenModel(object):
+class TGenModel(object, metaclass=ABCMeta):
     '''
     an action-dependency graph model for Shadow's traffic generator
     '''
 
-    __metaclass__ = ABCMeta
-
     def dump_to_string(self):
         s = StringIO()
         write_graphml(self.graph, s)
@@ -42,9 +40,7 @@ class TGenLoadableModel(TGenModel):
         model_instance = cls(graph)
         return model_instance
 
-class GeneratableTGenModel(TGenModel):
-
-    __metaclass__ = ABCMeta
+class GeneratableTGenModel(TGenModel, metaclass=ABCMeta):
 
     @abstractmethod
     def generate(self):
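cStringIO is gone in Python 3; io.StringIO is the text-mode replacement and accepts only str, with io.BytesIO as its bytes counterpart. A minimal sketch of the split (one caveat worth verifying: if the networkx release in use serializes graphml to bytes, dump_to_string() would need BytesIO instead):

    from io import StringIO, BytesIO

    s = StringIO()
    s.write("text only")     # writing bytes here raises TypeError
    b = BytesIO()
    b.write(b"bytes only")   # writing str here raises TypeError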
diff --git a/onionperf/onionperf b/onionperf/onionperf
index 45ead3c..536d6e2 100755
--- a/onionperf/onionperf
+++ b/onionperf/onionperf
@@ -1,4 +1,4 @@
-#!/usr/bin/env python
+#!/usr/bin/env python3
 
 '''
   OnionPerf
@@ -479,7 +479,7 @@ def visualize(args):
     tor_viz = TorVisualization()
 
     for (path, label) in args.datasets:
-        nextformat = lfcycle.next()
+        nextformat = next(lfcycle)
 
         anal = Analysis.load(filename=path)
         if anal is not None:
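Iterators lost their .next() method in Python 3; the next() builtin works on both versions. Assuming lfcycle is an itertools.cycle over the line-format strings, a minimal sketch:

    from itertools import cycle

    lfcycle = cycle("k-,r-,b-".split(","))
    nextformat = next(lfcycle)   # lfcycle.next() raises AttributeError on Python 3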
diff --git a/onionperf/tests/test_reprocessing.py b/onionperf/tests/test_reprocessing.py
index 76d5b4b..efacc5f 100644
--- a/onionperf/tests/test_reprocessing.py
+++ b/onionperf/tests/test_reprocessing.py
@@ -45,7 +45,7 @@ def test_log_match_no_log_date():
 def test_log_match_with_filter_date():
     tgen_logs = reprocessing.collect_logs(DATA_DIR, '*tgen.log')
     torctl_logs = reprocessing.collect_logs(DATA_DIR, '*torctl.log')
-    test_date = datetime.date(2019, 01, 10)
+    test_date = datetime.date(2019, 1, 10)
     log_pairs =  reprocessing.match(tgen_logs, torctl_logs, test_date)
     well_known_list = [(DATA_DIR + 'logs/onionperf_2019-01-10_23:59:59.tgen.log', DATA_DIR + 'logs/onionperf_2019-01-10_23:59:59.torctl.log', datetime.datetime(2019, 1, 10, 0, 0))]
     assert_equals(log_pairs, well_known_list)
@@ -53,7 +53,7 @@ def test_log_match_with_filter_date():
 def test_log_match_with_wrong_filter_date():
     tgen_logs = reprocessing.collect_logs(DATA_DIR, '*tgen.log')
     torctl_logs = reprocessing.collect_logs(DATA_DIR, '*torctl.log')
-    test_date = datetime.date(2017, 01, 01)
+    test_date = datetime.date(2017, 1, 1)
     log_pairs =  reprocessing.match(tgen_logs, torctl_logs, test_date)
     well_known_list = []
     assert_equals(log_pairs, well_known_list)
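Integer literals with a bare leading zero were octal in Python 2 and are a SyntaxError in Python 3, which requires the explicit 0o prefix instead (the 0o775 change in test_utils.py below is the same fix). A minimal sketch:

    import datetime

    d = datetime.date(2019, 1, 10)   # 01-style literals no longer parse
    mode = 0o775                     # octal now needs the 0o prefix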
diff --git a/onionperf/tests/test_utils.py b/onionperf/tests/test_utils.py
index 71056ff..497c97e 100644
--- a/onionperf/tests/test_utils.py
+++ b/onionperf/tests/test_utils.py
@@ -95,7 +95,7 @@ def test_find_path_with_which():
     """
 
     temp_file = tempfile.NamedTemporaryFile()
-    os.chmod(temp_file.name, 0775)
+    os.chmod(temp_file.name, 0o775)
     work_path = util.find_path(None, temp_file.name, tempfile.tempdir)
     assert_equals(work_path, temp_file.name)
     temp_file.close()
@@ -252,7 +252,7 @@ def test_file_writable():
     test_writable.write("onionperf")
     test_writable.close()
     expected_checksum = "5001ed4ab25b52543946fa63da829d4eeab1bd254c89ffdad0877186e074b385"
-    with open(temp_file.name) as f:
+    with open(temp_file.name, 'rb') as f:
         file_bytes = f.read()
         file_checksum = hashlib.sha256(file_bytes).hexdigest()
     assert_equals(file_checksum, expected_checksum)
@@ -270,8 +270,8 @@ def test_file_writable_compressed():
     test_writable = util.FileWritable(temp_file.name, True)
     test_writable.write("onionperf")
     test_writable.close()
-    expected_checksum = "66a6256bc4b04529c7123fa9573d30de659ffaa0cce1cc9b189817c8bf30e813"
-    with open(temp_file.name) as f:
+    expected_checksum = "3556b3bee6bb56d0a42676cbbf5784ebe4151fe65b0797f42260f93212e2df11"
+    with open(temp_file.name, 'rb') as f:
         file_bytes = f.read()
         file_checksum = hashlib.sha256(file_bytes).hexdigest()
     assert_equals(file_checksum, expected_checksum)
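Both tests now open the file in binary mode because hashlib.sha256() accepts only bytes, and a plain open() returns str on Python 3. The changed expected checksum in the compressed test presumably reflects the switch from the external xz pipeline to lzma.open() in util.py, which emits different (equally valid) .xz bytes. A minimal sketch of the hashing pattern, with some_path as a placeholder:

    import hashlib

    with open(some_path, 'rb') as f:   # 'rb': sha256 wants bytes, not str
        digest = hashlib.sha256(f.read()).hexdigest()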
diff --git a/onionperf/util.py b/onionperf/util.py
index d481150..7c8e80f 100644
--- a/onionperf/util.py
+++ b/onionperf/util.py
@@ -4,10 +4,9 @@
   See LICENSE for licensing information
 '''
 
-import sys, os, socket, logging, random, re, shutil, datetime, urllib, gzip
-from subprocess import Popen, PIPE, STDOUT
+import sys, os, socket, logging, random, re, shutil, datetime, urllib.request, urllib.parse, urllib.error, gzip, lzma
 from threading import Lock
-from cStringIO import StringIO
+from io import StringIO
 from abc import ABCMeta, abstractmethod
 
 LINEFORMATS = "k-,r-,b-,g-,c-,m-,y-,k--,r--,b--,g--,c--,m--,y--,k:,r:,b:,g:,c:,m:,y:,k-.,r-.,b-.,g-.,c-.,m-.,y-."
@@ -156,7 +155,7 @@ def get_ip_address():
     """
     ip_address = None
     try:
-        data = urllib.urlopen('https://check.torproject.org/').read()
+        data = urllib.request.urlopen('https://check.torproject.org/').read().decode('utf-8')
         ip_address = find_ip_address_url(data)
         if not ip_address:
             logging.error(
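urllib was split into urllib.request, urllib.parse and urllib.error in Python 3, and urlopen().read() now returns bytes, so the body has to be decoded before any string matching. A minimal sketch (requires network access):

    import urllib.request

    raw = urllib.request.urlopen('https://check.torproject.org/').read()
    data = raw.decode('utf-8')   # bytes -> str before find_ip_address_url()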
@@ -195,18 +194,14 @@ class DataSource(object):
         self.filename = filename
         self.compress = compress
         self.source = None
-        self.xzproc = None
 
     def __iter__(self):
         if self.source is None:
             self.open()
         return self.source
 
-    def next(self):
-        return self.__next__()
-
-    def __next__(self):  # python 3
-        return self.source.next() if self.source is not None else None
+    def __next__(self):
+        return next(self.source) if self.source is not None else None
 
     def open(self):
         if self.source is None:
@@ -214,14 +209,12 @@ class DataSource(object):
                 self.source = sys.stdin
             elif self.compress or self.filename.endswith(".xz"):
                 self.compress = True
-                cmd = "xz --decompress --stdout {0}".format(self.filename)
-                xzproc = Popen(cmd.split(), stdout=PIPE)
-                self.source = xzproc.stdout
+                self.source = lzma.open(self.filename, mode='rt')
             elif self.filename.endswith(".gz"):
                 self.compress = True
-                self.source = gzip.open(self.filename, 'rb')
+                self.source = gzip.open(self.filename, 'rt')
             else:
-                self.source = open(self.filename, 'r')
+                self.source = open(self.filename, 'rt')
 
     def get_file_handle(self):
         if self.source is None:
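Replacing the xz child process with the lzma module (standard library since Python 3.3) removes the Popen plumbing, and mode 'rt' makes every branch yield decoded text lines. A minimal sketch of the same read path, with example.log.xz as a placeholder:

    import gzip, lzma

    with lzma.open("example.log.xz", mode='rt') as f:   # gzip.open(..., 'rt') behaves alike
        for line in f:
            pass   # each line is str, not bytes, because of text mode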
@@ -230,12 +223,9 @@ class DataSource(object):
 
     def close(self):
         if self.source is not None: self.source.close()
-        if self.xzproc is not None: self.xzproc.wait()
-
 
-class Writable(object):
-    __metaclass__ = ABCMeta
 
+class Writable(object, metaclass=ABCMeta):
     @abstractmethod
     def write(self, msg):
         pass
@@ -251,8 +241,6 @@ class FileWritable(Writable):
         self.do_compress = do_compress
         self.do_truncate = do_truncate
         self.file = None
-        self.xzproc = None
-        self.ddproc = None
         self.lock = Lock()
 
         if self.filename == '-':
@@ -275,14 +263,9 @@ class FileWritable(Writable):
 
     def __open_nolock(self):
         if self.do_compress:
-            self.xzproc = Popen("xz --threads=3 -".split(), stdin=PIPE, stdout=PIPE)
-            dd_cmd = "dd of={0}".format(self.filename)
-            # # note: its probably not a good idea to append to finalized compressed files
-            # if not self.do_truncate: dd_cmd += " oflag=append conv=notrunc"
-            self.ddproc = Popen(dd_cmd.split(), stdin=self.xzproc.stdout, stdout=open(os.devnull, 'w'), stderr=STDOUT)
-            self.file = self.xzproc.stdin
+            self.file = lzma.open(self.filename, mode='wt')
         else:
-            self.file = open(self.filename, 'w' if self.do_truncate else 'a', 0)
+            self.file = open(self.filename, 'wt' if self.do_truncate else 'at', 1)
 
     def close(self):
         self.lock.acquire()
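The buffering argument changes out of necessity here: Python 3 rejects buffering=0 (unbuffered) on text-mode files, so line buffering (buffering=1), which flushes at each newline, is the closest available behavior. A minimal sketch with example.log as a placeholder:

    f = open("example.log", 'at', 1)   # buffering=0 would raise ValueError in text mode
    f.write("flushed when the newline lands\n")
    f.close()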
@@ -293,12 +276,6 @@ class FileWritable(Writable):
         if self.file is not None:
             self.file.close()
             self.file = None
-        if self.xzproc is not None:
-            self.xzproc.wait()
-            self.xzproc = None
-        if self.ddproc is not None:
-            self.ddproc.wait()
-            self.ddproc = None
 
     def rotate_file(self, filename_datetime=datetime.datetime.now()):
         self.lock.acquire()
@@ -316,7 +293,7 @@ class FileWritable(Writable):
         self.__close_nolock()
         with open(self.filename, 'rb') as f_in, gzip.open(new_filename, 'wb') as f_out:
             shutil.copyfileobj(f_in, f_out)
-        with open(self.filename, 'a') as f_in:
+        with open(self.filename, 'ab') as f_in:
             f_in.truncate(0)
         self.__open_nolock()
 
diff --git a/onionperf/visualization.py b/onionperf/visualization.py
index c673617..a5dde54 100644
--- a/onionperf/visualization.py
+++ b/onionperf/visualization.py
@@ -46,9 +46,7 @@ pylab.rcParams.update({
 })
 '''
 
-class Visualization(object):
-
-    __metaclass__ = ABCMeta
+class Visualization(object, metaclass=ABCMeta):
 
     def __init__(self):
         self.datasets = []
@@ -349,7 +347,7 @@ class TGenVisualization(Visualization):
                         if client not in dls[bytes]: dls[bytes][client] = 0
                         for sec in d["time_to_last_byte"][b]: dls[bytes][client] += len(d["time_to_last_byte"][b][sec])
             for bytes in dls:
-                x, y = getcdf(dls[bytes].values(), shownpercentile=1.0)
+                x, y = getcdf(list(dls[bytes].values()), shownpercentile=1.0)
                 pylab.figure(figs[bytes].number)
                 pylab.plot(x, y, lineformat, label=label)
 
@@ -555,7 +553,7 @@ def getcdf(data, shownpercentile=0.99, maxpoints=10000.0):
     frac = cf(data)
     k = len(data) / maxpoints
     x, y, lasty = [], [], 0.0
-    for i in xrange(int(round(len(data) * shownpercentile))):
+    for i in range(int(round(len(data) * shownpercentile))):
         if i % k > 1.0: continue
         assert not numpy.isnan(data[i])
         x.append(data[i])
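Two final interpreter-level renames: dict.values() returns a lazy view in Python 3, so callers that need a real sequence must copy it with list(), and xrange() is gone because range() is itself lazy now. A minimal sketch:

    counts = {'client-a': 3, 'client-b': 5}
    values = list(counts.values())   # a bare view won't do where a list is expected
    for i in range(len(values)):     # range() replaces Python 2's xrange()
        pass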
diff --git a/run_tests.sh b/run_tests.sh
index d45032c..7a30e61 100755
--- a/run_tests.sh
+++ b/run_tests.sh
@@ -1,3 +1,3 @@
 #!/bin/sh
 
-PYTHONPATH=. python -m nose --with-coverage --cover-package=onionperf
+PYTHONPATH=. python3 -m nose --with-coverage --cover-package=onionperf
diff --git a/setup.py b/setup.py
index 07e46a6..41bba2f 100644
--- a/setup.py
+++ b/setup.py
@@ -1,4 +1,4 @@
-#!/usr/bin/env python
+#!/usr/bin/env python3
 
 from distutils.core import setup
 


