From 3717eb13c7c8d69c954f6651133aff42e6ef96bb Mon Sep 17 00:00:00 2001
From: Alberto Sonnino
Date: Tue, 6 Jul 2021 12:59:29 +0100
Subject: [PATCH] Better debug log print

---
 benchmark/benchmark/logs.py | 12 ++++++++----
 benchmark/fabfile.py        | 20 ++++++++++----------
 2 files changed, 18 insertions(+), 14 deletions(-)

diff --git a/benchmark/benchmark/logs.py b/benchmark/benchmark/logs.py
index 939de177..c29e77d3 100644
--- a/benchmark/benchmark/logs.py
+++ b/benchmark/benchmark/logs.py
@@ -21,8 +21,12 @@ def __init__(self, clients, primaries, workers, faults=0):
         assert all(x for x in inputs)
 
         self.faults = faults
-        self.committee_size = len(primaries) + faults
-        self.workers = len(workers) // len(primaries)
+        if isinstance(faults, int):
+            self.committee_size = len(primaries) + int(faults)
+            self.workers = len(workers) // len(primaries)
+        else:
+            self.committee_size = '?'
+            self.workers = '?'
 
         # Parse the clients logs.
         try:
@@ -203,8 +207,8 @@ def result(self):
             ' SUMMARY:\n'
             '-----------------------------------------\n'
             ' + CONFIG:\n'
-            f' Faults: {self.faults:,} node(s)\n'
-            f' Committee size: {self.committee_size:,} node(s)\n'
+            f' Faults: {self.faults} node(s)\n'
+            f' Committee size: {self.committee_size} node(s)\n'
             f' Worker(s) per node: {self.workers} worker(s)\n'
             f' Collocate primary and workers: {self.collocate}\n'
             f' Input rate: {sum(self.rate):,} tx/s\n'
diff --git a/benchmark/fabfile.py b/benchmark/fabfile.py
index c6727652..f2feaf85 100644
--- a/benchmark/fabfile.py
+++ b/benchmark/fabfile.py
@@ -94,11 +94,11 @@ def install(ctx):
 def remote(ctx, debug=False):
     ''' Run benchmarks on AWS '''
     bench_params = {
-        'faults': 0,
-        'nodes': [4],
-        'workers': 7,
-        'collocate': False,
-        'rate': [500_000],
+        'faults': 3,
+        'nodes': [10],
+        'workers': 1,
+        'collocate': True,
+        'rate': [10_000, 110_000],
         'tx_size': 512,
         'duration': 300,
         'runs': 2,
@@ -122,10 +122,10 @@ def remote(ctx, debug=False):
 def plot(ctx):
     ''' Plot performance using the logs generated by "fab remote" '''
     plot_params = {
-        'faults': [0],
-        'nodes': [4],
-        'workers': [4, 7],
-        'collocate': False,
+        'faults': [0, 1, 3],
+        'nodes': [10],
+        'workers': [1],
+        'collocate': True,
         'tx_size': 512,
         'max_latency': [3_500, 4_500]
     }
@@ -148,6 +148,6 @@ def kill(ctx):
 def logs(ctx):
     ''' Print a summary of the logs '''
     try:
-        print(LogParser.process('./logs').result())
+        print(LogParser.process('./logs', faults='?').result())
     except ParseError as e:
         Print.error(BenchError('Failed to parse logs', e))
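
For context, a minimal standalone sketch (not part of the patch) of the fallback this commit introduces: when the fault count cannot be recovered from the logs, "fab logs" now passes faults='?', and LogParser prints placeholder values instead of failing, since the old f'{self.faults:,}' thousands-separator format spec raises ValueError on a string. The summarize helper and its arguments below are hypothetical illustrations; the real logic lives in benchmark/benchmark/logs.py.

# Hypothetical sketch mirroring the patched LogParser.__init__ fallback.
def summarize(primaries, workers, faults):
    if isinstance(faults, int):
        committee_size = len(primaries) + int(faults)
        workers_per_node = len(workers) // len(primaries)
    else:
        # A non-integer faults value (e.g. '?') means the caller could not
        # recover the fault count from the logs; print placeholders instead.
        committee_size = '?'
        workers_per_node = '?'
    return (
        f' Faults: {faults} node(s)\n'
        f' Committee size: {committee_size} node(s)\n'
        f' Worker(s) per node: {workers_per_node} worker(s)\n'
    )

# Example: unknown fault count, four primaries collocated with four workers.
print(summarize(['p0', 'p1', 'p2', 'p3'], ['w0', 'w1', 'w2', 'w3'], faults='?'))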