Commit

changes related to burst rate in logs
naitik-supraoracles committed Apr 16, 2024
1 parent 7a7a0b7 commit bc70a20
Showing 5 changed files with 14 additions and 12 deletions.
2 changes: 1 addition & 1 deletion benchmark/benchmark/local.py
@@ -121,7 +121,7 @@ def run(self, debug=False):
 
             # Parse logs and return the parser.
             Print.info('Parsing logs...')
-            return LogParser.process(PathMaker.logs_path(), faults=self.faults)
+            return LogParser.process(PathMaker.logs_path(), self.bench_parameters.burst, faults=self.faults)
 
         except (subprocess.SubprocessError, ParseError) as e:
             self._kill_nodes()
12 changes: 7 additions & 5 deletions benchmark/benchmark/logs.py
@@ -14,12 +14,12 @@ class ParseError(Exception):
 
 
 class LogParser:
-    def __init__(self, clients, primaries, workers, faults=0):
+    def __init__(self, clients, primaries, workers, burst,faults=0):
         inputs = [clients, primaries, workers]
         assert all(isinstance(x, list) for x in inputs)
         assert all(isinstance(x, str) for y in inputs for x in y)
         assert all(x for x in inputs)
-
+        self.burst = burst
         self.faults = faults
         if isinstance(faults, int):
             self.committee_size = len(primaries) + int(faults)
@@ -211,9 +211,11 @@ def result(self):
             f' Committee size: {self.committee_size} node(s)\n'
             f' Worker(s) per node: {self.workers} worker(s)\n'
             f' Collocate primary and workers: {self.collocate}\n'
-            f' Input rate: {sum(self.rate):,} tx/s\n'
+            f' Input rate: {(sum(self.rate)*(int(1000/self.burst))):,} tx/s\n'
             f' Transaction size: {self.size[0]:,} B\n'
             f' Execution time: {round(duration):,} s\n'
+            f' Burst: {self.burst:,} s\n'
+
             '\n'
             f' Header size: {header_size:,} B\n'
             f' Max header delay: {max_header_delay:,} ms\n'
@@ -240,7 +242,7 @@ def print(self, filename):
             f.write(self.result())
 
     @classmethod
-    def process(cls, directory, faults=0):
+    def process(cls, directory, burst, faults=0):
         assert isinstance(directory, str)
 
         clients = []
@@ -256,4 +258,4 @@ def process(cls, directory, faults=0):
             with open(filename, 'r') as f:
                 workers += [f.read()]
 
-        return cls(clients, primaries, workers, faults=faults)
+        return cls(clients, primaries, workers, burst, faults=faults)
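
A minimal sketch (not part of the commit) of the conversion LogParser.result() now performs: the rates recorded in the client logs are per-burst figures, and a burst fires every `burst` milliseconds, so the parser scales the summed per-burst rate by the number of bursts per second. The helper name below is illustrative only.

```python
# Illustrative only: mirrors sum(self.rate) * int(1000 / self.burst) above.
def effective_input_rate(per_burst_rates, burst_ms):
    """Aggregate per-burst client rates into a tx/s figure."""
    assert burst_ms > 0
    bursts_per_second = int(1000 / burst_ms)  # e.g. burst_ms=50 -> 20 bursts/s
    return sum(per_burst_rates) * bursts_per_second

# Example: four clients, each logging 4,000 tx per 50 ms burst.
print(f'{effective_input_rate([4_000] * 4, 50):,} tx/s')  # 320,000 tx/s
```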
8 changes: 4 additions & 4 deletions benchmark/benchmark/remote.py
@@ -272,7 +272,7 @@ def _run_single(self, rate, burst, committee, bench_parameters, debug=False):
         sleep(ceil(duration / 20))
         self.kill(hosts=hosts, delete_logs=False)
 
-    def _logs(self, committee, faults):
+    def _logs(self, committee, burst, faults):
         # Delete local logs (if any).
         cmd = CommandMaker.clean_logs()
         subprocess.run([cmd], shell=True, stderr=subprocess.DEVNULL)
@@ -305,7 +305,7 @@ def _logs(self, committee, faults):
 
         # Parse logs and return the parser.
         Print.info('Parsing logs and computing performance...')
-        return LogParser.process(PathMaker.logs_path(), faults=faults)
+        return LogParser.process(PathMaker.logs_path(), burst, faults=faults)
 
     def run(self, bench_parameters_dict, node_parameters_dict, debug=False):
         assert isinstance(debug, bool)
@@ -345,7 +345,7 @@ def run(self, bench_parameters_dict, node_parameters_dict, debug=False):
 
             for burst in bench_parameters.burst:
                 rate = bench_parameters.rate[0]
-                Print.heading(f'\nRunning {n} nodes (input rate: {rate:,} tx/s, burst : {burst:,})')
+                Print.heading(f'\nRunning {n} nodes (input rate: {(rate*len(bench_parameters.nodes)*(int(1000/burst))):,} tx/s, burst : {burst:,})')
 
                 # Run the benchmark.
                 for i in range(bench_parameters.runs):
@@ -356,7 +356,7 @@ def run(self, bench_parameters_dict, node_parameters_dict, debug=False):
                         )
 
                         faults = bench_parameters.faults
-                        logger = self._logs(committee_copy, faults)
+                        logger = self._logs(committee_copy, burst, faults)
                         logger.print(PathMaker.result_file(
                             faults,
                             n,
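
The remote benchmark heading additionally scales the figure by the node count, presumably because each node's client is driven at the same per-burst rate. A hedged sketch (hypothetical helper name) of that computation:

```python
# Mirrors rate * len(bench_parameters.nodes) * int(1000 / burst) in the heading above.
def heading_input_rate(rate_per_burst, num_nodes, burst_ms):
    return rate_per_burst * num_nodes * int(1000 / burst_ms)

# Example: rate=4,000 tx per burst, 4 nodes, a burst every 50 ms.
print(f'{heading_input_rate(4_000, 4, 50):,} tx/s')  # 320,000 tx/s
```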
2 changes: 1 addition & 1 deletion benchmark/fabfile.py
@@ -16,7 +16,7 @@ def local(ctx, debug=True):
         'faults': 0,
         'nodes': 4,
         'workers': 1,
-        'rate': 50_000,
+        'rate': 4000,
         'tx_size': 512,
         'duration': 20,
         "burst" : 50
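
Assuming the parser formula above applies to the local run, these defaults (a 'rate' of 4,000 tx per burst and a 50 ms burst interval) correspond to a reported input rate of 4,000 × int(1000/50) = 80,000 tx/s.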
2 changes: 1 addition & 1 deletion node/src/benchmark_client.rs
@@ -64,7 +64,7 @@ async fn main() -> Result<()> {
     info!("Transactions size: {} B", size);
 
     // NOTE: This log entry is used to compute performance.
-    info!("Transactions rate: {} tx/s", rate);
+    info!("Transactions rate: {} tx/s", rate*(1000/burst_duration));
 
     let client = Client {
         target,
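
Assuming `burst_duration` is the burst interval in integer milliseconds, `1000/burst_duration` truncates (as does `int(1000/self.burst)` on the Python side), so a burst interval that does not evenly divide 1,000 ms makes the logged rate round down; with the default 50 ms burst it is exactly 20 bursts per second.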
