Skip to content
This repository has been archived by the owner on May 4, 2021. It is now read-only.

Bug29047 #330

Open
wants to merge 3 commits into
base: master
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
4 changes: 3 additions & 1 deletion docs/source/conf.py
Original file line number Diff line number Diff line change
Expand Up @@ -13,6 +13,8 @@
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
import os


def find_version():
with open(os.path.join("..", "..", "sbws", "__init__.py")) as fp:
for line in fp:
Expand Down Expand Up @@ -197,5 +199,5 @@ def find_version():
todo_include_todos = True

source_parsers = {
'.md': 'recommonmark.parser.CommonMarkParser',
'.md': 'recommonmark.parser.CommonMarkParser',
}
25 changes: 12 additions & 13 deletions sbws/core/cleanup.py
Original file line number Diff line number Diff line change
@@ -1,28 +1,27 @@
"""Util functions to cleanup disk space."""
import gzip
import logging
import os
import shutil
import time
import types
from argparse import ArgumentDefaultsHelpFormatter
from datetime import datetime, timedelta

from sbws.util.filelock import DirectoryLock
from sbws.globals import fail_hard
from sbws.util.filelock import DirectoryLock
from sbws.util.timestamp import unixts_to_dt_obj
from argparse import ArgumentDefaultsHelpFormatter
from datetime import datetime
from datetime import timedelta
import os
import gzip
import shutil
import logging
import time

log = logging.getLogger(__name__)


def gen_parser(sub):
'''
"""
Helper function for the broader argument parser generating code that adds
in all the possible command line arguments for the cleanup command.

:param argparse._SubParsersAction sub: what to add a sub-parser to
'''
"""
d = 'Compress and delete results and/or v3bw files old files.' \
'Configuration options are read to determine which are old files'
p = sub.add_parser('cleanup', description=d,
Expand Down Expand Up @@ -168,12 +167,12 @@ def _clean_result_files(args, conf):


def main(args, conf):
'''
"""
Main entry point in to the cleanup command.

:param argparse.Namespace args: command line arguments
:param configparser.ConfigParser conf: parsed config files
'''
"""
datadir = conf.getpath('paths', 'datadir')
if not os.path.isdir(datadir):
fail_hard('%s does not exist', datadir)
Expand Down
14 changes: 7 additions & 7 deletions sbws/core/generate.py
Original file line number Diff line number Diff line change
@@ -1,13 +1,13 @@
import logging
import os
from argparse import ArgumentDefaultsHelpFormatter
from math import ceil

from sbws.globals import (fail_hard, SBWS_SCALE_CONSTANT, TORFLOW_SCALING,
SBWS_SCALING, TORFLOW_BW_MARGIN, PROP276_ROUND_DIG,
DAY_SECS, NUM_MIN_RESULTS)
from sbws.lib.v3bwfile import V3BWFile
from sbws.globals import (DAY_SECS, NUM_MIN_RESULTS, PROP276_ROUND_DIG,
SBWS_SCALE_CONSTANT, SBWS_SCALING, TORFLOW_BW_MARGIN,
TORFLOW_SCALING, fail_hard)
from sbws.lib.resultdump import load_recent_results_in_datadir
from argparse import ArgumentDefaultsHelpFormatter
import os
import logging
from sbws.lib.v3bwfile import V3BWFile
from sbws.util.timestamp import now_fname

log = logging.getLogger(__name__)
Expand Down
59 changes: 30 additions & 29 deletions sbws/core/scanner.py
Original file line number Diff line number Diff line change
@@ -1,36 +1,37 @@
''' Measure the relays. '''
"""Measure the relays. """

from ..lib.circuitbuilder import GapsCircuitBuilder as CB
from ..lib.resultdump import ResultDump
from ..lib.resultdump import ResultSuccess, ResultErrorCircuit
from ..lib.resultdump import ResultErrorStream
from ..lib.relaylist import RelayList
from ..lib.relayprioritizer import RelayPrioritizer
from ..lib.destination import DestinationList
from ..util.timestamp import now_isodt_str
from ..util.state import State
from sbws.globals import fail_hard
import sbws.util.stem as stem_utils
import sbws.util.requests as requests_utils
import logging
import os
import random
import time
from argparse import ArgumentDefaultsHelpFormatter
from multiprocessing.dummy import Pool
from threading import Event
import time
import os
import logging

import requests
import random

import sbws.util.requests as requests_utils
import sbws.util.stem as stem_utils
from sbws.globals import fail_hard

from ..lib.circuitbuilder import GapsCircuitBuilder as CB
from ..lib.destination import DestinationList
from ..lib.relaylist import RelayList
from ..lib.relayprioritizer import RelayPrioritizer
from ..lib.resultdump import (ResultDump, ResultErrorCircuit,
ResultErrorStream, ResultSuccess)
from ..util.state import State
from ..util.timestamp import now_isodt_str

rng = random.SystemRandom()
end_event = Event()
log = logging.getLogger(__name__)


def timed_recv_from_server(session, dest, byte_range):
''' Request the **byte_range** from the URL at **dest**. If successful,
"""Request the **byte_range** from the URL at **dest**. If successful,
return True and the time it took to download. Otherwise return False and an
exception. '''
exception. """
headers = {'Range': byte_range, 'Accept-Encoding': 'identity'}
start_time = time.time()
# TODO:
Expand All @@ -48,13 +49,13 @@ def timed_recv_from_server(session, dest, byte_range):


def get_random_range_string(content_length, size):
'''
"""
Return a random range of bytes of length **size**. **content_length** is
the size of the file we will be requesting a range of bytes from.

For example, for content_length of 100 and size 10, this function will
return one of the following: '0-9', '1-10', '2-11', [...] '89-98', '90-99'
'''
"""
assert size <= content_length
# start can be anywhere in the content_length as long as it is **size**
# bytes away from the end or more. Because range is [start, end) (doesn't
Expand All @@ -72,15 +73,15 @@ def get_random_range_string(content_length, size):


def measure_rtt_to_server(session, conf, dest, content_length):
''' Make multiple end-to-end RTT measurements by making small HTTP requests
"""Make multiple end-to-end RTT measurements by making small HTTP requests
over a circuit + stream that should already exist, persist, and not need
rebuilding. If something goes wrong and not all of the RTT measurements can
be made, return None. Otherwise return a list of the RTTs (in seconds).

:returns tuple: results, or None if the measurement fails.
    None or an exception if the measurement fails.

'''
"""
rtts = []
size = conf.getint('scanner', 'min_download_size')
for _ in range(0, conf.getint('scanner', 'num_rtts')):
Expand Down Expand Up @@ -141,11 +142,11 @@ def measure_bandwidth_to_server(session, conf, dest, content_length):


def _pick_ideal_second_hop(relay, dest, rl, cont, is_exit):
'''
"""
Sbws builds two hop circuits. Given the **relay** to measure with
destination **dest**, pick a second relay that is or is not an exit
according to **is_exit**.
'''
"""
candidates = rl.exits_not_bad_allowing_port(dest.port) if is_exit \
else rl.non_exits
if not len(candidates):
Expand Down Expand Up @@ -313,17 +314,17 @@ def _next_expected_amount(expected_amount, result_time, download_times,


def result_putter(result_dump):
    """Return a callback that enqueues a single measurement result.

    :param ResultDump result_dump: object whose ``queue`` collects results
        from measurement workers.
    :returns: a one-argument function suitable for use as a success callback.
    """
    # Fix: the diff residue left both the old '''-style and the new
    # """-style docstring in the body; keep a single docstring.
    def closure(measurement_result):
        # Hand the result off to the ResultDump worker via its queue
        # (queue.put returns None, so the closure returns None).
        return result_dump.queue.put(measurement_result)
    return closure


def result_putter_error(target):
''' Create a function that takes a single argument -- an error from a
"""Create a function that takes a single argument -- an error from a
measurement -- and return that function so it can be used by someone else
'''
"""
def closure(err):
log.error('Unhandled exception caught while measuring %s: %s %s',
target.nickname, type(err), err)
Expand Down
46 changes: 22 additions & 24 deletions sbws/core/stats.py
Original file line number Diff line number Diff line change
@@ -1,16 +1,13 @@
from sbws.globals import fail_hard
from sbws.lib.resultdump import Result
from sbws.lib.resultdump import ResultError
from sbws.lib.resultdump import ResultErrorCircuit
from sbws.lib.resultdump import ResultErrorStream
from sbws.lib.resultdump import ResultSuccess
from sbws.lib.resultdump import load_recent_results_in_datadir
from argparse import ArgumentDefaultsHelpFormatter
import logging
import os
from datetime import datetime
from datetime import timedelta
from argparse import ArgumentDefaultsHelpFormatter
from datetime import datetime, timedelta
from statistics import mean
import logging

from sbws.globals import fail_hard
from sbws.lib.resultdump import (Result, ResultError, ResultErrorCircuit,
ResultErrorStream, ResultSuccess,
load_recent_results_in_datadir)

log = logging.getLogger(__name__)

Expand All @@ -32,7 +29,8 @@ def _print_stats_error_types(data):
continue
number = counts[count_type]
print('{}/{} ({:.2f}%) results were {}'.format(
number, counts['total'], 100*number/counts['total'], count_type))
number, counts['total'], 100 * number / counts['total'],
count_type))


def _result_type_per_relay(data, result_type):
Expand All @@ -43,10 +41,10 @@ def _result_type_per_relay(data, result_type):


def _get_box_plot_values(iterable):
    ''' Return the min, q1, med, q3, and max of the input list or iterable.
    """Return the min, q1, med, q3, and max of the input list or iterable.
This function is NOT perfect, and I think that's fine for basic statistical
needs. Instead of median, it will return low or high median. Same for q1
and q3. '''
and q3. """
if not isinstance(iterable, list):
iterable = list(iterable)
iterable.sort()
Expand All @@ -55,7 +53,7 @@ def _get_box_plot_values(iterable):
q1_idx = round(length / 4)
q3_idx = median_idx + q1_idx
return [iterable[0], iterable[q1_idx], iterable[median_idx],
iterable[q3_idx], iterable[length-1]]
iterable[q3_idx], iterable[length - 1]]


def _print_results_type_box_plot(data, result_type):
Expand All @@ -78,14 +76,14 @@ def _print_averages(data):


def _results_into_bandwidths(results, limit=5):
'''
"""
    For all the given results, extract their download statistics and normalize
them into bytes/second bandwidths.

:param list results: list of :class:`sbws.list.resultdump.ResultSuccess`
:param int limit: The maximum number of bandwidths to return
:returns: list of up to `limit` bandwidths, with the largest first
'''
"""
downloads = []
for result in results:
assert isinstance(result, ResultSuccess)
Expand All @@ -95,14 +93,14 @@ def _results_into_bandwidths(results, limit=5):


def print_stats(args, data):
'''
"""
Called from main to print various statistics about the organized **data**
to stdout.

:param argparse.Namespace args: command line arguments
:param dict data: keyed by relay fingerprint, and with values of
:class:`sbws.lib.resultdump.Result` subclasses
'''
"""
results = []
for fp in data:
results.extend(data[fp])
Expand All @@ -127,20 +125,20 @@ def print_stats(args, data):
print(len(success_results), 'success results and',
len(error_results), 'error results')
print('The fastest download was {:.2f} KiB/s'.format(
fastest_transfer/1024))
fastest_transfer / 1024))
print('Results come from', first, 'to', last, 'over a period of',
duration)
if getattr(args, 'error_types', False) is True:
_print_stats_error_types(data)


def gen_parser(sub):
'''
"""
Helper function for the broader argument parser generating code that adds
in all the possible command line arguments for the stats command.

:param argparse._SubParsersAction sub: what to add a sub-parser to
'''
"""
d = 'Write some statistics about the data collected so far to stdout'
p = sub.add_parser('stats', formatter_class=ArgumentDefaultsHelpFormatter,
description=d)
Expand All @@ -149,12 +147,12 @@ def gen_parser(sub):


def main(args, conf):
'''
"""
Main entry point into the stats command.

:param argparse.Namespace args: command line arguments
:param configparser.ConfigParser conf: parsed config files
'''
"""

datadir = conf.getpath('paths', 'datadir')
if not os.path.isdir(datadir):
Expand Down
8 changes: 4 additions & 4 deletions sbws/globals.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
import os
import logging
import os

log = logging.getLogger(__name__)

Expand Down Expand Up @@ -55,21 +55,21 @@


def fail_hard(*a, **kw):
    """Log a critical message and then exit as fast as possible.

    All positional and keyword arguments are passed straight through to
    ``log.critical`` (i.e. a %-style format string plus its arguments).
    """
    # Fix: diff residue left both the old '''-style and the new """-style
    # docstring lines in the body; keep a single docstring.
    log.critical(*a, **kw)
    # NOTE(review): uses the site builtin `exit`; sys.exit(1) would be the
    # more robust spelling — confirm before changing, callers rely on exit(1).
    exit(1)


def touch_file(fname, times=None):
    """
    If **fname** exists, update its last access and modified times to now. If
    **fname** does not exist, create it. If **times** are specified, pass them
    to os.utime for use.

    :param str fname: Name of file to update or create
    :param tuple times: 2-tuple of floats for access time and modified time
        respectively
    """
    # Fix: the diff residue interleaved the old ''' and new """ docstring
    # delimiters, leaving an unterminated string literal; restore a single
    # well-formed docstring. The code itself is unchanged.
    log.debug('Touching %s', fname)
    # Open in append mode so an existing file is not truncated and a missing
    # file is created; utime on the fd updates its timestamps.
    with open(fname, 'a') as fd:
        os.utime(fd.fileno(), times=times)
Loading