[tor-commits] [onionperf/develop] Adds support for previous analyses
commit 074842a412c2d7ede45a40eb4fb610cd32619c11
Author: Ana Custura <ana@xxxxxxxxxxxxxx>
Date: Sat Jul 11 11:38:54 2020 +0100
Adds support for previous analyses
---
onionperf/analysis.py | 6 ++++
onionperf/visualization.py | 72 ++++++++++++++++++++++++++++++++--------------
2 files changed, 57 insertions(+), 21 deletions(-)
diff --git a/onionperf/analysis.py b/onionperf/analysis.py
index 53d879c..49d6dba 100644
--- a/onionperf/analysis.py
+++ b/onionperf/analysis.py
@@ -100,6 +100,12 @@ class OPAnalysis(Analysis):
         except:
             return None
 
+    def get_tgen_transfers(self, node):
+        try:
+            return self.json_db['data'][node]['tgen']['transfers']
+        except:
+            return None
+
     @classmethod
     def load(cls, filename="onionperf.analysis.json.xz", input_prefix=os.getcwd()):
         filepath = os.path.abspath(os.path.expanduser("{0}".format(filename)))
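A minimal usage sketch of the new accessor (the analysis file name below is only a placeholder): analyses written by current OnionPerf expose tgen "streams", while analyses older than version 3 expose tgen "transfers", so a caller branches on json_db['version'] just as visualization.py does further down.

    from onionperf.analysis import OPAnalysis

    # Placeholder path; any OnionPerf analysis JSON file could go here.
    analysis = OPAnalysis.load(filename="onionperf.analysis.json.xz")

    for client in analysis.get_nodes():
        if analysis.json_db['version'] >= '3':
            records = analysis.get_tgen_streams(client)     # current format
        else:
            records = analysis.get_tgen_transfers(client)   # pre-3.0 format
        # Both accessors return a dict keyed by stream/transfer id,
        # or None if the node has no tgen data.
        if records is not None:
            print(client, len(records))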
diff --git a/onionperf/visualization.py b/onionperf/visualization.py
index 48c837b..68ad751 100644
--- a/onionperf/visualization.py
+++ b/onionperf/visualization.py
@@ -54,32 +54,62 @@ class TGenVisualization(Visualization):
         streams = []
         for (analyses, label) in self.datasets:
             for analysis in analyses:
-                for client in analysis.get_nodes():
-                    tgen_streams = analysis.get_tgen_streams(client)
-                    for stream_id, stream_data in tgen_streams.items():
-                        stream = {"stream_id": stream_id, "label": label,
-                                  "filesize_bytes": stream_data["stream_info"]["recvsize"]}
-                        stream["server"] = "onion" if ".onion:" in stream_data["transport_info"]["remote"] else "public"
-                        if "time_info" in stream_data:
-                            s = stream_data["time_info"]
-                            if "payload_progress" in s:
+                if analysis.json_db['version'] >= '3':
+                    for client in analysis.get_nodes():
+                        tgen_streams = analysis.get_tgen_streams(client)
+                        for stream_id, stream_data in tgen_streams.items():
+                            stream = {"stream_id": stream_id, "label": label,
+                                      "filesize_bytes": int(stream_data["stream_info"]["recvsize"])}
+                            stream["server"] = "onion" if ".onion:" in stream_data["transport_info"]["remote"] else "public"
+                            if "time_info" in stream_data:
+                                s = stream_data["time_info"]
+                                if "usecs-to-first-byte-recv" in s:
+                                    stream["time_to_first_byte"] = float(s["usecs-to-first-byte-recv"])/1000000
+                                if "usecs-to-last-byte-recv" in s:
+                                    stream["time_to_last_byte"] = float(s["usecs-to-last-byte-recv"])/1000000
+                            if "elapsed_seconds" in stream_data:
+                                s = stream_data["elapsed_seconds"]
+                                # Explanation of the math below for computing Mbps: From filesize_bytes
+                                # and payload_progress fields we can compute the number of seconds that
+                                # have elapsed between receiving bytes 524,288 and 1,048,576, which is a
+                                # total amount of 524,288 bytes or 4,194,304 bits or 4.194304 megabits.
+                                # We want the reciprocal of that value with unit megabits per second.
+ if stream_data["stream_info"]["recvsize"] == 5242880 and "0.2" in s["payload_progress_recv"]:
+ stream["mbps"] = 4.194304 / (s["payload_progress_recv"]["0.2"] - s["payload_progress_recv"]["0.1"])
+ if "error" in stream_data["transport_info"] and stream_data["transport_info"]["error"] != "NONE":
+ stream["error_code"] = stream_data["transport_info"]["error"]
+ if "unix_ts_start" in stream_data:
+ stream["start"] = datetime.datetime.utcfromtimestamp(stream_data["unix_ts_start"])
+ streams.append(stream)
+ else:
+ for client in analysis.get_nodes():
+ tgen_transfers = analysis.get_tgen_transfers(client)
+ for transfer_id, transfer_data in tgen_transfers.items():
+ stream = {"stream_id": transfer_id, "label": label,
+ "filesize_bytes": transfer_data["filesize_bytes"]}
+ stream["server"] = "onion" if ".onion:" in transfer_data["endpoint_remote"] else "public"
+ if "elapsed_seconds" in transfer_data:
+ s = transfer_data["elapsed_seconds"]
+ if "payload_progress" in s:
# Explanation of the math below for computing Mbps: From filesize_bytes
# and payload_progress fields we can compute the number of seconds that
# have elapsed between receiving bytes 524,288 and 1,048,576, which is a
# total amount of 524,288 bytes or 4,194,304 bits or 4.194304 megabits.
# We want the reciprocal of that value with unit megabits per second.
- if stream_data["stream_info"]["recv_size"] == 5242880 and "0.2" in s["elapsed_seconds"]["payload_progress_recv"]:
- stream["mbps"] = 4.194304 / (s["elapsed_seconds"]["payload_progress_recv"]["0.2"] - s["elapsed_seconds"]["payload_progress_recv"]["0.1"])
-
- if "usecs-to-first-byte-recv" in s:
- stream["time_to_first_byte"] = float(s["usecs-to-first-byte-recv"])/1000000
- if "usecs-to-last-byte-recv" in s:
- stream["time_to_last_byte"] = float(s["usecs-to-last-byte-recv"])/1000000
- if "error" in stream_data["transport_info"] and stream_data["transport_info"]["error"] != "NONE":
- stream["error_code"] = stream_data["transport_info"]["error"]
- if "unix_ts_start" in stream_data:
- stream["start"] = datetime.datetime.utcfromtimestamp(stream_data["unix_ts_start"])
- streams.append(stream)
+ if transfer_data["filesize_bytes"] == 1048576 and "1.0" in s["payload_progress"]:
+ stream["mbps"] = 4.194304 / (s["payload_progress"]["1.0"] - s["payload_progress"]["0.5"])
+ if transfer_data["filesize_bytes"] == 5242880 and "0.2" in s["payload_progress"]:
+ stream["mbps"] = 4.194304 / (s["payload_progress"]["0.2"] - s["payload_progress"]["0.1"])
+ if "first_byte" in s:
+ stream["time_to_first_byte"] = s["first_byte"]
+ if "last_byte" in s:
+ stream["time_to_last_byte"] = s["last_byte"]
+ if "error_code" in transfer_data and transfer_data["error_code"] != "NONE":
+ stream["error_code"] = transfer_data["error_code"]
+ if "unix_ts_start" in transfer_data:
+ stream["start"] = datetime.datetime.utcfromtimestamp(transfer_data["unix_ts_start"])
+ streams.append(stream)
+
self.data = pd.DataFrame.from_records(streams, index="stream_id")
def __plot_firstbyte_ecdf(self):
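To make the Mbps comment above concrete, here is a worked example with invented elapsed-seconds values (not taken from any real measurement). For a 5,242,880-byte download, the payload-progress keys "0.1" and "0.2" record the elapsed seconds at which 524,288 and 1,048,576 bytes had been received, so 524,288 bytes (4.194304 megabits) arrive between those two points:

    # Invented numbers, for illustration only.
    payload_progress_recv = {"0.1": 2.5, "0.2": 4.5}    # elapsed seconds at 10% and 20%

    transferred_bits = (0.2 - 0.1) * 5242880 * 8        # ~4,194,304 bits
    megabits = transferred_bits / 1000000               # ~4.194304 Mbit

    elapsed = payload_progress_recv["0.2"] - payload_progress_recv["0.1"]   # 2.0 seconds
    mbps = megabits / elapsed                           # ~2.097152 Mbit/s

    # Same result as the constant-based one-liner used in the patch:
    assert abs(mbps - 4.194304 / elapsed) < 1e-9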