
[tor-commits] [sbws/master] Complete info about required disk space



commit 1e916ea2452d499dacae9ac1e64385cd73f10f20
Author: juga0 <juga@xxxxxxxxxx>
Date:   Thu Sep 6 16:55:51 2018 +0000

    Complete info about required disk space
---
 sbws/util/fs.py | 77 +++++++++++++++++++++++++++++++++++++++++++++------------
 1 file changed, 61 insertions(+), 16 deletions(-)

diff --git a/sbws/util/fs.py b/sbws/util/fs.py
index c7a5de7..0095f2a 100644
--- a/sbws/util/fs.py
+++ b/sbws/util/fs.py
@@ -5,37 +5,82 @@ import shutil
 
 log = logging.getLogger(__name__)
 
+DISK_SPACE_TEXT = """
+Disk space requirements
+-----------------------
+v3bw files: the maximum space required is ~{mb_bw} MB, after {d_bw} days.
+result files: the maximum space required is ~{mb_results} MB, after {d_r} days.
+tor directory: the space required is ~{mb_tor} MB.
+code and dependencies: the space required is ~{mb_code} MB.
+log files: the maximum space required is ~{mb_log} MB.
+Total disk space required: ~{mb_total} MB.
+"""
+
 
 def sbws_required_disk_space(conf):
     """Disk space required by sbws files.
     Rough calculations.
 
     :param ConfigParser conf: sbws configuration
-    :returns: int, size in MiB
+    :returns: str, text describing the approximate disk space required, in MB
     """
+    text_dict = {}
     # number of relays times the average size of a line, in bytes
     size_v3bw_file = 7500 * 220
-    # the minimum number of required v3bw files
-    num_v3bw_files = 2
+    # default crontab configuration will run generate every hour
+    num_v3bw_files_day = 24
     # ~1000 is the length of a line when the result is successful
     # ~4550 is the number of lines of the biggest result file
     size_result_file = 4550 * 1000
-    num_result_files = conf.getint('general', 'data_period')
-    # not counting compressed files
-    space_v3bw_files = size_v3bw_file * num_v3bw_files
-    space_result_files = size_result_file * num_result_files
+    # sbws stores results in one file per day
+    num_result_files_day = 1
+    space_v3bw_files_day = size_v3bw_file * num_v3bw_files_day
+    space_result_files_day = size_result_file * num_result_files_day
+    # approximate size of a compressed (v3bw or result) file, in bytes
+    size_compressed_files = 600 * 1024
+    # default crontab configuration will run cleanup once a day
+    # default cleanup configuration will compress v3bw files after 1 day
+    # and delete them after 7 days
+    v3bw_compress_after_days = conf.getint('cleanup',
+                                           'v3bw_files_compress_after_days')
+    v3bw_delete_after_days = conf.getint('cleanup',
+                                         'v3bw_files_delete_after_days')
+    v3bw_max_space_after_delete = \
+        (space_v3bw_files_day * v3bw_compress_after_days) + \
+        (size_compressed_files * num_v3bw_files_day * v3bw_delete_after_days)
+    text_dict['mb_bw'] = round(v3bw_max_space_after_delete / 1000 ** 2)
+    text_dict['d_bw'] = v3bw_delete_after_days
+    # default crontab configuration will run cleanup once a day
+    # default cleanup configuration will compress result files after 1 day
+    # and delete them after 7 days
+    results_compress_after_days = conf.getint('cleanup',
+                                              'data_files_compress_after_days')
+    results_delete_after_days = conf.getint('cleanup',
+                                            'data_files_delete_after_days')
+    results_max_space_after_delete = \
+        (space_result_files_day * results_compress_after_days) + \
+        (size_compressed_files * num_result_files_day *
+         results_delete_after_days)
+    text_dict['mb_results'] = round(results_max_space_after_delete / 1000 ** 2)
+    text_dict['d_r'] = results_delete_after_days
     # maximum space the log files can take: the configured maximum size per
     # file times the number of rotated backup files
-    size_log_file = conf.getint('logging', 'to_file_max_bytes') or 10485760 \
-        if conf.getboolean('logging', 'to_file') else 0
+    space_log_files = 0
+    if conf.getboolean('logging', 'to_file'):
+        size_log_file = conf.getint('logging', 'to_file_max_bytes')
+        num_log_files = conf.getint('logging', 'to_file_num_backups')
+        space_log_files = size_log_file * num_log_files
+    text_dict['mb_log'] = round(space_log_files / 1000 ** 2)
     # roughly, size of a current tor dir
-    space_tor_dir = 19828000
-    # duplicate everything to warn early
-    size_total = (space_v3bw_files + size_log_file + space_result_files +
-                  space_tor_dir) * 2
-    # convert to MiB
-    size_total_mb = round(size_total / (1024 ** 2))
-    return size_total_mb
+    size_tor_dir = 19828000
+    text_dict['mb_tor'] = round(size_tor_dir / 1000 ** 2)
+    # roughly, the size of this code and dependencies
+    size_code_deps = 2097152
+    text_dict['mb_code'] = round(size_code_deps / 1000 ** 2)
+    # Multiply by 2, just in case
+    size_total = (results_max_space_after_delete +
+                  v3bw_max_space_after_delete + space_log_files + size_tor_dir +
+                  size_code_deps) * 2
+    text_dict['mb_total'] = round(size_total / 1000 ** 2)
+    space_text = DISK_SPACE_TEXT.format(**text_dict)
+    return space_text
 
 
 def df(path):
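
For a rough sense of the numbers, the same arithmetic can be run standalone
with the default cleanup values mentioned in the comments above (compress
after 1 day, delete after 7 days). These values are illustrative assumptions;
the function itself reads them from the sbws configuration.

# Standalone sketch of the arithmetic above. The cleanup values are
# illustrative assumptions; sbws_required_disk_space() reads them from conf.
size_v3bw_file = 7500 * 220             # bytes per v3bw file
num_v3bw_files_day = 24                 # generate runs hourly by default
size_result_file = 4550 * 1000          # bytes per result file
num_result_files_day = 1                # one result file per day
size_compressed_files = 600 * 1024      # bytes per compressed file
compress_after_days = 1                 # assumed default
delete_after_days = 7                   # assumed default

v3bw_max = (size_v3bw_file * num_v3bw_files_day * compress_after_days
            + size_compressed_files * num_v3bw_files_day * delete_after_days)
results_max = (size_result_file * num_result_files_day * compress_after_days
               + size_compressed_files * num_result_files_day
               * delete_after_days)

print(round(v3bw_max / 1000 ** 2))      # ~143 MB
print(round(results_max / 1000 ** 2))   # ~9 MB

Adding the tor directory (~20 MB) and code plus dependencies (~2 MB), then
doubling the sum as the function does, puts the default total at roughly
350 MB when logging to file is disabled.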



_______________________________________________
tor-commits mailing list
tor-commits@xxxxxxxxxxxxxxxxxxxx
https://lists.torproject.org/cgi-bin/mailman/listinfo/tor-commits