[tor-commits] [onionperf/master] Removes torperf export function in analysis.py
commit b411bd32760a12de25d406f870d903f223c1e763
Author: Ana Custura <ana@xxxxxxxxxxxxxx>
Date: Thu May 14 14:50:15 2020 +0100
Removes torperf export function in analysis.py
---
onionperf/analysis.py | 143 --------------------------------------------------
1 file changed, 143 deletions(-)
diff --git a/onionperf/analysis.py b/onionperf/analysis.py
index dfdf2a6..83f993d 100644
--- a/onionperf/analysis.py
+++ b/onionperf/analysis.py
@@ -150,149 +150,6 @@ class Analysis(object):
         analysis_instance.json_db = db
         return analysis_instance
-    def export_torperf_version_1_1(self, output_prefix=os.getcwd(), do_compress=False):
-        # export file in `@type torperf 1.1` format: https://collector.torproject.org/#type-torperf
-        if not os.path.exists(output_prefix):
-            os.makedirs(output_prefix)
-
-        for nickname in self.json_db['data']:
-            if 'tgen' not in self.json_db['data'][nickname] or 'transfers' not in self.json_db['data'][nickname]['tgen']:
-                continue
-
-            xfers_by_filesize = {}
-            for xfer_db in list(self.json_db['data'][nickname]['tgen']['transfers'].values()):
-                xfers_by_filesize.setdefault(xfer_db['filesize_bytes'], []).append(xfer_db)
-
-            streams_by_srcport, circuits = {}, []
-            if 'tor' in self.json_db['data'][nickname]:
-                if 'streams' in self.json_db['data'][nickname]['tor']:
-                    for streams_db in list(self.json_db['data'][nickname]['tor']['streams'].values()):
-                        if 'source' in streams_db:
-                            srcport = int(streams_db['source'].split(':')[1])
-                            streams_by_srcport[srcport] = streams_db
-                if 'circuits' in self.json_db['data'][nickname]['tor']:
-                    circuits = self.json_db['data'][nickname]['tor']['circuits']
-
-            for filesize in xfers_by_filesize:
-                # build the filename
-                filename_prefix = "{}-{}".format(nickname, filesize)
-                filename_middle = "-{}".format(util.date_to_string(self.date_filter)) if self.date_filter is not None else ""
-                filename_suffix = ".tpf.xz" if do_compress else ".tpf"
-                filename = "{}{}{}".format(filename_prefix, filename_middle, filename_suffix)
-
-                filepath = "{}/{}".format(output_prefix, filename)
-
-                logging.info("saving analysis results to {0}".format(filepath))
-
-                # always append instead of truncating file
-                output = util.FileWritable(filepath, do_compress=do_compress, do_truncate=False)
-                output.open()
-
-                for xfer_db in xfers_by_filesize[filesize]:
-                    # if any keys are missing, log a warning
-                    try:
-                        d = {}
-
-                        d['SOURCE'] = nickname
-                        d['SOURCEADDRESS'] = self.json_db['data'][nickname]['measurement_ip']
-                        d['ENDPOINTLOCAL'] = xfer_db['endpoint_local']
-                        d['ENDPOINTPROXY'] = xfer_db['endpoint_proxy']
-                        d['ENDPOINTREMOTE'] = xfer_db['endpoint_remote']
-                        d['HOSTNAMELOCAL'] = xfer_db['hostname_local']
-                        d['HOSTNAMEREMOTE'] = xfer_db['hostname_remote']
-
-                        d['FILESIZE'] = xfer_db['filesize_bytes']
-                        d['READBYTES'] = xfer_db['total_bytes_read']
-                        d['WRITEBYTES'] = xfer_db['total_bytes_write']
-                        d['DIDTIMEOUT'] = 0
-
-                        def ts_to_str(ts): return "{0:.02f}".format(ts)
-
-                        # initialize times to 0.0
-                        time_keys = ['START', 'SOCKET', 'CONNECT', 'NEGOTIATE', 'REQUEST', 'RESPONSE', 'DATAREQUEST', 'DATARESPONSE', 'DATACOMPLETE', 'LAUNCH']
-                        for i in range(1, 10):
-                            time_keys.append('DATAPERC{}0'.format(i))
-                        for key in time_keys:
-                            d[key] = 0.0
-
-                        # since these are initialized to 0, it's OK if we are missing some times, e.g. due to read error
-                        if 'unix_ts_start' in xfer_db:
-
-                            # if we need to filter by date and the download did not start on that date, skip it
-                            if self.date_filter is not None:
-                                start_datetime = datetime.datetime.utcfromtimestamp(xfer_db['unix_ts_start'])
-                                if start_datetime is not None:
-                                    if not util.do_dates_match(self.date_filter, start_datetime.date()):
-                                        logging.info("skipping download because start date {} does not match filter date {}".format(util.date_to_string(start_datetime.date()), util.date_to_string(self.date_filter)))
-                                        continue
-
-                            d['START'] = ts_to_str(xfer_db['unix_ts_start'])
-                            if 'elapsed_seconds' in xfer_db:
-                                if 'socket_create' in xfer_db['elapsed_seconds']:
-                                    d['SOCKET'] = ts_to_str(xfer_db['unix_ts_start'] + xfer_db['elapsed_seconds']['socket_create'])
-                                if 'socket_connect' in xfer_db['elapsed_seconds']:
-                                    d['CONNECT'] = ts_to_str(xfer_db['unix_ts_start'] + xfer_db['elapsed_seconds']['socket_connect'])
-                                if 'proxy_choice' in xfer_db['elapsed_seconds']:
-                                    d['NEGOTIATE'] = ts_to_str(xfer_db['unix_ts_start'] + xfer_db['elapsed_seconds']['proxy_choice'])
-                                if 'proxy_request' in xfer_db['elapsed_seconds']:
-                                    d['REQUEST'] = ts_to_str(xfer_db['unix_ts_start'] + xfer_db['elapsed_seconds']['proxy_request'])
-                                if 'proxy_response' in xfer_db['elapsed_seconds']:
-                                    d['RESPONSE'] = ts_to_str(xfer_db['unix_ts_start'] + xfer_db['elapsed_seconds']['proxy_response'])
-                                if 'command' in xfer_db['elapsed_seconds']:
-                                    d['DATAREQUEST'] = ts_to_str(xfer_db['unix_ts_start'] + xfer_db['elapsed_seconds']['command'])
-                                if 'response' in xfer_db['elapsed_seconds']:
-                                    d['DATARESPONSE'] = ts_to_str(xfer_db['unix_ts_start'] + xfer_db['elapsed_seconds']['response'])
-
-                                if 'payload_progress' in xfer_db['elapsed_seconds']:
-                                    # set DATAPERC[10,20,...,90]
-                                    for decile in sorted(xfer_db['elapsed_seconds']['payload_progress'].keys()):
-                                        if decile in xfer_db['elapsed_seconds']['payload_progress'] and xfer_db['elapsed_seconds']['payload_progress'][decile] is not None:
-                                            decile_as_int = int(float(decile) * 100)
-                                            d['DATAPERC{0}'.format(decile_as_int)] = ts_to_str(xfer_db['unix_ts_start'] + xfer_db['elapsed_seconds']['payload_progress'][decile])
-                                else:
-                                    for i in range(1, 10):
-                                        d.pop('DATAPERC{}0'.format(i))
-
-                                if 'last_byte' in xfer_db['elapsed_seconds']:
-                                    d['DATACOMPLETE'] = ts_to_str(xfer_db['unix_ts_start'] + xfer_db['elapsed_seconds']['last_byte'])
-
-                        if xfer_db['is_error']:
-                            d['DIDTIMEOUT'] = 1 # could be ioerror or timeout or etc, torperf did not distinguish
-                            d['ERRORCODE'] = xfer_db['error_code'] # this field is onionperf specific
-
-                        # now get the tor parts
-                        srcport = int(xfer_db['endpoint_local'].split(':')[2])
-                        if srcport in streams_by_srcport:
-                            stream_db = streams_by_srcport[srcport]
-                            if 'failure_reason_local' in list(stream_db.keys()):
-                                d['ERRORCODE'] += '_' + stream_db['failure_reason_local']
-                            if 'failure_reason_remote' in list(stream_db.keys()):
-                                d['ERRORCODE'] += '_' + stream_db['failure_reason_remote']
-                            circid = int(stream_db['circuit_id'] or 0)
-                            if circid in circuits:
-                                circuit_db = circuits[circid]
-
-                                d['LAUNCH'] = circuit_db['unix_ts_start']
-                                d['PATH'] = ','.join([item[0].split('~')[0] for item in circuit_db['path']])
-                                d['BUILDTIMES'] = ','.join([str(item[1]) for item in circuit_db['path']])
-                                d['TIMEOUT'] = circuit_db['build_timeout'] if 'build_timeout' in circuit_db else None
-                                d['QUANTILE'] = circuit_db['build_quantile'] if 'build_quantile' in circuit_db else None
-                                d['CIRC_ID'] = circid
-                                d['USED_AT'] = stream_db['unix_ts_end']
-                                d['USED_BY'] = int(stream_db['stream_id'])
-
-                        if 'ERRORCODE' in list(d.keys()):
-                            d['ERRORCODE'] = ERRORS[d['ERRORCODE']]
-
-                        output.write("@type torperf 1.1\r\n")
-                        output_str = ' '.join("{0}={1}".format(k, d[k]) for k in sorted(d.keys()) if d[k] is not None).strip()
-                        output.write("{0}\r\n".format(output_str))
-                    except KeyError as e:
-                        logging.warning("KeyError while exporting torperf file, missing key '{0}', skipping transfer '{1}'".format(str(e), xfer_db['transfer_id']))
-                        continue
-
-                output.close()
-        logging.info("done!")
-
 def subproc_analyze_func(analysis_args):
     signal(SIGINT, SIG_IGN) # ignore interrupts
     a = analysis_args[0]
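
For reference, the removed exporter wrote one "@type torperf 1.1" type annotation followed by a single line of space-separated KEY=value pairs per transfer, with keys sorted and None-valued fields dropped (see the output.write calls in the diff above). Below is a minimal, self-contained sketch of just that record formatting; the format_torperf_record helper and the example field values are illustrative assumptions, not part of OnionPerf's API.

    # Minimal sketch of the torperf 1.1 record formatting used by the
    # removed exporter. Helper name and example values are hypothetical;
    # only the key sorting, None-filtering, and CRLF framing mirror the diff.

    def format_torperf_record(d):
        # sort keys and drop None values, as the removed code did
        body = ' '.join("{0}={1}".format(k, d[k]) for k in sorted(d) if d[k] is not None)
        return "@type torperf 1.1\r\n" + body.strip() + "\r\n"

    record = {
        'SOURCE': 'op-nl',               # measurement host nickname (hypothetical)
        'FILESIZE': 51200,               # transfer size in bytes
        'DIDTIMEOUT': 0,                 # set to 1 on any error; torperf did not distinguish error types
        'START': '1589464215.00',        # timestamps are unix seconds formatted to 2 decimals
        'DATACOMPLETE': '1589464219.32',
        'TIMEOUT': None,                 # None fields are omitted from the output line
    }

    print(format_torperf_record(record), end='')

As in the removed code, records for a given file size were appended to a per-host .tpf file (xz-compressed when do_compress was set) rather than truncating any existing file.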