#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#***************************************************************************
#                                  _   _ ____  _
#  Project                     ___| | | |  _ \| |
#                             / __| | | | |_) | |
#                            | (__| |_| |  _ <| |___
#                             \___|\___/|_| \_\_____|
#
# Copyright (C) Daniel Stenberg, <daniel@haxx.se>, et al.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution. The terms
# are also available at https://curl.se/docs/copyright.html.
#
# You may opt to use, copy, modify, merge, publish, distribute and/or sell
# copies of the Software, and permit persons to whom the Software is
# furnished to do so, under the terms of the COPYING file.
#
# This software is distributed on an "AS IS" basis, WITHOUT WARRANTY OF ANY
# KIND, either express or implied.
#
# SPDX-License-Identifier: curl
#
###########################################################################
#
import argparse
import json
import logging
import os
import re
import sys
from statistics import mean
from typing import Dict, Any, Optional, List

from testenv import Env, Httpd, Nghttpx, CurlClient, Caddy, ExecResult, NghttpxQuic, RunProfile

log = logging.getLogger(__name__)


class ScoreCardException(Exception):
    """Raised for scorecard-specific failures (e.g. unsupported protocol)."""
    pass


class ScoreCard:
    """Benchmark curl against local test servers (httpd/nghttpx/caddy).

    Collects handshake, download, upload and request-rate measurements
    into a nested dict ("score") that can be printed as text or JSON.
    """

    def __init__(self, env: Env,
                 httpd: Optional[Httpd],
                 nghttpx: Optional[Nghttpx],
                 caddy: Optional[Caddy],
                 verbose: int,
                 curl_verbose: int,
                 download_parallel: int = 0):
        self.verbose = verbose
        self.env = env
        self.httpd = httpd
        self.nghttpx = nghttpx
        self.caddy = caddy
        # run curl silently unless curl verbosity was requested
        self._silent_curl = not curl_verbose
        # 0 means "use the transfer count as the parallelism limit"
        self._download_parallel = download_parallel

    def info(self, msg):
        # Progress output goes to stderr so stdout stays clean for results.
        if self.verbose > 0:
            sys.stderr.write(msg)
            sys.stderr.flush()

    def handshakes(self, proto: str) -> Dict[str, Any]:
        """Measure TLS connect/handshake times against public hosts.

        Returns, per host and IP version, the mean `time_connect` and
        `time_appconnect` over `sample_size` runs (-1 when all failed).
        """
        props = {}
        sample_size = 5
        self.info('TLS Handshake\n')
        for authority in [
            'curl.se', 'google.com', 'cloudflare.com', 'nghttp2.org'
        ]:
            self.info(f'  {authority}...')
            props[authority] = {}
            for ipv in ['ipv4', 'ipv6']:
                self.info(f'{ipv}...')
                c_samples = []
                hs_samples = []
                errors = []
                for _ in range(sample_size):
                    curl = CurlClient(env=self.env, silent=self._silent_curl)
                    args = [
                        '--http3-only' if proto == 'h3' else '--http2',
                        f'--{ipv}', f'https://{authority}/'
                    ]
                    r = curl.run_direct(args=args, with_stats=True)
                    if r.exit_code == 0 and len(r.stats) == 1:
                        c_samples.append(r.stats[0]['time_connect'])
                        hs_samples.append(r.stats[0]['time_appconnect'])
                    else:
                        errors.append(f'exit={r.exit_code}')
                props[authority][f'{ipv}-connect'] = mean(c_samples) \
                    if len(c_samples) else -1
                props[authority][f'{ipv}-handshake'] = mean(hs_samples) \
                    if len(hs_samples) else -1
                props[authority][f'{ipv}-errors'] = errors
            self.info('ok.\n')
        return props

    def _make_docs_file(self, docs_dir: str, fname: str, fsize: int):
        """Create a server document of (at least) `fsize` bytes, return its path."""
        fpath = os.path.join(docs_dir, fname)
        data1k = 1024*'x'
        flen = 0
        with open(fpath, 'w') as fd:
            # write in 1KB chunks; may overshoot fsize by up to 1023 bytes
            while flen < fsize:
                fd.write(data1k)
                flen += len(data1k)
        return fpath

    def _check_downloads(self, r: ExecResult, count: int):
        """Return an error summary string for a download run, or None if all ok."""
        error = ''
        if r.exit_code != 0:
            error += f'exit={r.exit_code} '
        if r.exit_code != 0 or len(r.stats) != count:
            error += f'stats={len(r.stats)}/{count} '
        fails = [s for s in r.stats if s['response_code'] != 200]
        if len(fails) > 0:
            error += f'{len(fails)} failed'
        return error if len(error) > 0 else None

    def transfer_single(self, url: str, proto: str, count: int):
        """Download `url` once per sample, `count` samples. Speed in bytes/s."""
        # `count` is the number of samples; each sample is a single transfer
        sample_size = count
        count = 1
        samples = []
        errors = []
        profiles = []
        self.info('single...')
        for _ in range(sample_size):
            curl = CurlClient(env=self.env, silent=self._silent_curl)
            r = curl.http_download(urls=[url], alpn_proto=proto, no_save=True,
                                   with_headers=False, with_profile=True)
            err = self._check_downloads(r, count)
            if err:
                errors.append(err)
            else:
                total_size = sum([s['size_download'] for s in r.stats])
                samples.append(total_size / r.duration.total_seconds())
                profiles.append(r.profile)
        return {
            'count': count,
            'samples': sample_size,
            'max-parallel': 1,
            'speed': mean(samples) if len(samples) else -1,
            'errors': errors,
            # guard against all samples having failed (consistent with uploads)
            'stats': RunProfile.AverageStats(profiles) if len(profiles) else {},
        }

    def transfer_serial(self, url: str, proto: str, count: int):
        """Download `count` URLs sequentially on one connection."""
        sample_size = 1
        samples = []
        errors = []
        profiles = []
        url = f'{url}?[0-{count - 1}]'
        self.info('serial...')
        for _ in range(sample_size):
            curl = CurlClient(env=self.env, silent=self._silent_curl)
            r = curl.http_download(urls=[url], alpn_proto=proto, no_save=True,
                                   with_headers=False, with_profile=True)
            err = self._check_downloads(r, count)
            if err:
                errors.append(err)
            else:
                total_size = sum([s['size_download'] for s in r.stats])
                samples.append(total_size / r.duration.total_seconds())
                profiles.append(r.profile)
        return {
            'count': count,
            'samples': sample_size,
            'max-parallel': 1,
            'speed': mean(samples) if len(samples) else -1,
            'errors': errors,
            'stats': RunProfile.AverageStats(profiles) if len(profiles) else {},
        }

    def transfer_parallel(self, url: str, proto: str, count: int):
        """Download `count` URLs in parallel (limit from --download-parallel)."""
        sample_size = 1
        samples = []
        errors = []
        profiles = []
        max_parallel = self._download_parallel if self._download_parallel > 0 else count
        url = f'{url}?[0-{count - 1}]'
        self.info('parallel...')
        for _ in range(sample_size):
            curl = CurlClient(env=self.env, silent=self._silent_curl)
            r = curl.http_download(urls=[url], alpn_proto=proto, no_save=True,
                                   with_headers=False,
                                   with_profile=True,
                                   extra_args=['--parallel',
                                               '--parallel-max', str(max_parallel)])
            err = self._check_downloads(r, count)
            if err:
                errors.append(err)
            else:
                total_size = sum([s['size_download'] for s in r.stats])
                samples.append(total_size / r.duration.total_seconds())
                profiles.append(r.profile)
        return {
            'count': count,
            'samples': sample_size,
            'max-parallel': max_parallel,
            'speed': mean(samples) if len(samples) else -1,
            'errors': errors,
            'stats': RunProfile.AverageStats(profiles) if len(profiles) else {},
        }

    def download_url(self, label: str, url: str, proto: str, count: int):
        """Run single/serial/parallel download measurements for one URL."""
        self.info(f'  {count}x{label}: ')
        props = {
            # 10 samples of a single transfer, regardless of `count`
            'single': self.transfer_single(url=url, proto=proto, count=10),
        }
        if count > 1:
            props['serial'] = self.transfer_serial(url=url, proto=proto,
                                                   count=count)
            props['parallel'] = self.transfer_parallel(url=url, proto=proto,
                                                       count=count)
        self.info('ok.\n')
        return props

    def downloads(self, proto: str, count: int,
                  fsizes: List[int]) -> Dict[str, Any]:
        """Download measurements against httpd (direct or via nghttpx) and caddy."""
        scores = {}
        if self.httpd:
            if proto == 'h3':
                port = self.env.h3_port
                via = 'nghttpx'
                descr = f'port {port}, proxying httpd'
            else:
                port = self.env.https_port
                via = 'httpd'
                descr = f'port {port}'
            self.info(f'{via} downloads\n')
            scores[via] = {
                'description': descr,
            }
            for fsize in fsizes:
                label = self.fmt_size(fsize)
                fname = f'score{label}.data'
                self._make_docs_file(docs_dir=self.httpd.docs_dir,
                                     fname=fname, fsize=fsize)
                url = f'https://{self.env.domain1}:{port}/{fname}'
                results = self.download_url(label=label, url=url,
                                            proto=proto, count=count)
                scores[via][label] = results
        if self.caddy:
            port = self.caddy.port
            via = 'caddy'
            descr = f'port {port}'
            self.info('caddy downloads\n')
            scores[via] = {
                'description': descr,
            }
            for fsize in fsizes:
                label = self.fmt_size(fsize)
                fname = f'score{label}.data'
                self._make_docs_file(docs_dir=self.caddy.docs_dir,
                                     fname=fname, fsize=fsize)
                url = f'https://{self.env.domain1}:{port}/{fname}'
                results = self.download_url(label=label, url=url,
                                            proto=proto, count=count)
                scores[via][label] = results
        return scores

    def _check_uploads(self, r: ExecResult, count: int):
        """Return an error summary string for an upload run, or None if all ok."""
        error = ''
        if r.exit_code != 0:
            error += f'exit={r.exit_code} '
        if r.exit_code != 0 or len(r.stats) != count:
            error += f'stats={len(r.stats)}/{count} '
        fails = [s for s in r.stats if s['response_code'] != 200]
        if len(fails) > 0:
            error += f'{len(fails)} failed'
            for f in fails:
                error += f'[{f["response_code"]}]'
        return error if len(error) > 0 else None

    def upload_single(self, url: str, proto: str, fpath: str, count: int):
        """PUT `fpath` to `url` once per sample, `count` samples."""
        # `count` is the number of samples; each sample is a single transfer
        sample_size = count
        count = 1
        samples = []
        errors = []
        profiles = []
        self.info('single...')
        for _ in range(sample_size):
            curl = CurlClient(env=self.env, silent=self._silent_curl)
            r = curl.http_put(urls=[url], fdata=fpath, alpn_proto=proto,
                              with_headers=False, with_profile=True)
            err = self._check_uploads(r, count)
            if err:
                errors.append(err)
            else:
                total_size = sum([s['size_upload'] for s in r.stats])
                samples.append(total_size / r.duration.total_seconds())
                profiles.append(r.profile)
        return {
            'count': count,
            'samples': sample_size,
            'max-parallel': 1,
            'speed': mean(samples) if len(samples) else -1,
            'errors': errors,
            'stats': RunProfile.AverageStats(profiles) if len(profiles) else {},
        }

    def upload_serial(self, url: str, proto: str, fpath: str, count: int):
        """PUT `fpath` `count` times sequentially on one connection."""
        sample_size = 1
        samples = []
        errors = []
        profiles = []
        url = f'{url}?id=[0-{count - 1}]'
        self.info('serial...')
        for _ in range(sample_size):
            curl = CurlClient(env=self.env, silent=self._silent_curl)
            r = curl.http_put(urls=[url], fdata=fpath, alpn_proto=proto,
                              with_headers=False, with_profile=True)
            err = self._check_uploads(r, count)
            if err:
                errors.append(err)
            else:
                total_size = sum([s['size_upload'] for s in r.stats])
                samples.append(total_size / r.duration.total_seconds())
                profiles.append(r.profile)
        return {
            'count': count,
            'samples': sample_size,
            'max-parallel': 1,
            'speed': mean(samples) if len(samples) else -1,
            'errors': errors,
            'stats': RunProfile.AverageStats(profiles) if len(profiles) else {},
        }

    def upload_parallel(self, url: str, proto: str, fpath: str, count: int):
        """PUT `fpath` `count` times with up to `count` parallel transfers."""
        sample_size = 1
        samples = []
        errors = []
        profiles = []
        max_parallel = count
        url = f'{url}?id=[0-{count - 1}]'
        self.info('parallel...')
        for _ in range(sample_size):
            curl = CurlClient(env=self.env, silent=self._silent_curl)
            r = curl.http_put(urls=[url], fdata=fpath, alpn_proto=proto,
                              with_headers=False, with_profile=True,
                              extra_args=[
                                  '--parallel',
                                  '--parallel-max', str(max_parallel)
                              ])
            err = self._check_uploads(r, count)
            if err:
                errors.append(err)
            else:
                total_size = sum([s['size_upload'] for s in r.stats])
                samples.append(total_size / r.duration.total_seconds())
                profiles.append(r.profile)
        return {
            'count': count,
            'samples': sample_size,
            'max-parallel': max_parallel,
            'speed': mean(samples) if len(samples) else -1,
            'errors': errors,
            'stats': RunProfile.AverageStats(profiles) if len(profiles) else {},
        }

    def upload_url(self, label: str, url: str, fpath: str, proto: str, count: int):
        """Run single/serial/parallel upload measurements for one URL."""
        self.info(f'  {count}x{label}: ')
        props = {
            # 10 samples of a single transfer, regardless of `count`
            'single': self.upload_single(url=url, proto=proto, fpath=fpath,
                                         count=10),
        }
        if count > 1:
            props['serial'] = self.upload_serial(url=url, proto=proto,
                                                 fpath=fpath, count=count)
            props['parallel'] = self.upload_parallel(url=url, proto=proto,
                                                     fpath=fpath, count=count)
        self.info('ok.\n')
        return props

    def uploads(self, proto: str, count: int,
                fsizes: List[int]) -> Dict[str, Any]:
        """Upload measurements against httpd (direct or via nghttpx) and caddy."""
        scores = {}
        if self.httpd:
            if proto == 'h3':
                port = self.env.h3_port
                via = 'nghttpx'
                descr = f'port {port}, proxying httpd'
            else:
                port = self.env.https_port
                via = 'httpd'
                descr = f'port {port}'
            self.info(f'{via} uploads\n')
            scores[via] = {
                'description': descr,
            }
            for fsize in fsizes:
                label = self.fmt_size(fsize)
                fname = f'upload{label}.data'
                fpath = self._make_docs_file(docs_dir=self.env.gen_dir,
                                             fname=fname, fsize=fsize)
                url = f'https://{self.env.domain1}:{port}/curltest/put'
                results = self.upload_url(label=label, url=url, fpath=fpath,
                                          proto=proto, count=count)
                scores[via][label] = results
        if self.caddy:
            port = self.caddy.port
            via = 'caddy'
            descr = f'port {port}'
            self.info('caddy uploads\n')
            scores[via] = {
                'description': descr,
            }
            for fsize in fsizes:
                label = self.fmt_size(fsize)
                fname = f'upload{label}.data'
                fpath = self._make_docs_file(docs_dir=self.env.gen_dir,
                                             fname=fname, fsize=fsize)
                # caddy uploads go via domain2
                url = f'https://{self.env.domain2}:{port}/curltest/put'
                results = self.upload_url(label=label, url=url, fpath=fpath,
                                          proto=proto, count=count)
                scores[via][label] = results
        return scores

    def do_requests(self, url: str, proto: str, count: int,
                    max_parallel: int = 1):
        """Issue `count` requests and compute the request rate (reqs/s)."""
        sample_size = 1
        samples = []
        errors = []
        profiles = []
        url = f'{url}?[0-{count - 1}]'
        extra_args = [
            # one response code per line on stdout, checked below
            '-w', '%{response_code},\\n',
        ]
        if max_parallel > 1:
            extra_args.extend([
                '--parallel', '--parallel-max', str(max_parallel)
            ])
        self.info(f'{max_parallel}...')
        for _ in range(sample_size):
            curl = CurlClient(env=self.env, silent=self._silent_curl)
            r = curl.http_download(urls=[url], alpn_proto=proto, no_save=True,
                                   with_headers=False, with_profile=True,
                                   with_stats=False, extra_args=extra_args)
            if r.exit_code != 0:
                errors.append(f'exit={r.exit_code}')
            else:
                samples.append(count / r.duration.total_seconds())
                non_200s = 0
                for line in r.stdout.splitlines():
                    if not line.startswith('200,'):
                        non_200s += 1
                if non_200s > 0:
                    errors.append(f'responses != 200: {non_200s}')
                profiles.append(r.profile)
        return {
            'count': count,
            'samples': sample_size,
            'speed': mean(samples) if len(samples) else -1,
            'errors': errors,
            'stats': RunProfile.AverageStats(profiles) if len(profiles) else {},
        }

    def requests_url(self, url: str, proto: str, count: int):
        """Measure request rates at several parallelism levels."""
        self.info(f'  {url}: ')
        props = {}
        # 300 is max in curl, see tool_main.h
        for m in [1, 6, 25, 50, 100, 300]:
            props[str(m)] = self.do_requests(url=url, proto=proto, count=count,
                                             max_parallel=m)
        self.info('ok.\n')
        return props

    def requests(self, proto: str, req_count) -> Dict[str, Any]:
        """Request-rate measurements against httpd/nghttpx and caddy."""
        scores = {}
        if self.httpd:
            if proto == 'h3':
                port = self.env.h3_port
                via = 'nghttpx'
                descr = f'port {port}, proxying httpd'
            else:
                port = self.env.https_port
                via = 'httpd'
                descr = f'port {port}'
            self.info(f'{via} requests\n')
            self._make_docs_file(docs_dir=self.httpd.docs_dir,
                                 fname='reqs10.data', fsize=10*1024)
            url1 = f'https://{self.env.domain1}:{port}/reqs10.data'
            scores[via] = {
                'description': descr,
                'count': req_count,
                '10KB': self.requests_url(url=url1, proto=proto, count=req_count),
            }
        if self.caddy:
            port = self.caddy.port
            via = 'caddy'
            descr = f'port {port}'
            self.info('caddy requests\n')
            self._make_docs_file(docs_dir=self.caddy.docs_dir,
                                 fname='req10.data', fsize=10 * 1024)
            url1 = f'https://{self.env.domain1}:{port}/req10.data'
            scores[via] = {
                'description': descr,
                'count': req_count,
                '10KB': self.requests_url(url=url1, proto=proto, count=req_count),
            }
        return scores

    def score_proto(self, proto: str,
                    handshakes: bool = True,
                    downloads: Optional[List[int]] = None,
                    download_count: int = 50,
                    uploads: Optional[List[int]] = None,
                    upload_count: int = 50,
                    req_count=5000,
                    requests: bool = True):
        """Run all selected measurements for `proto` and return the score dict.

        Raises ScoreCardException when curl lacks support for the protocol
        or the protocol/implementation cannot be determined.
        """
        self.info(f"scoring {proto}\n")
        p = {}
        if proto == 'h3':
            p['name'] = 'h3'
            if not self.env.have_h3_curl():
                raise ScoreCardException('curl does not support HTTP/3')
            for lib in ['ngtcp2', 'quiche', 'msh3', 'nghttp3']:
                if self.env.curl_uses_lib(lib):
                    p['implementation'] = lib
                    break
        elif proto == 'h2':
            p['name'] = 'h2'
            if not self.env.have_h2_curl():
                raise ScoreCardException('curl does not support HTTP/2')
            for lib in ['nghttp2', 'hyper']:
                if self.env.curl_uses_lib(lib):
                    p['implementation'] = lib
                    break
        elif proto == 'h1' or proto == 'http/1.1':
            proto = 'http/1.1'
            p['name'] = proto
            p['implementation'] = 'hyper' if self.env.curl_uses_lib('hyper')\
                else 'native'
        else:
            raise ScoreCardException(f"unknown protocol: {proto}")

        if 'implementation' not in p:
            raise ScoreCardException(f'did not recognize {p} lib')
        p['version'] = Env.curl_lib_version(p['implementation'])

        score = {
            'curl': self.env.curl_fullname(),
            'os': self.env.curl_os(),
            'protocol': p,
        }
        if handshakes:
            score['handshakes'] = self.handshakes(proto=proto)
        if downloads and len(downloads) > 0:
            score['downloads'] = self.downloads(proto=proto,
                                                count=download_count,
                                                fsizes=downloads)
        if uploads and len(uploads) > 0:
            score['uploads'] = self.uploads(proto=proto,
                                            count=upload_count,
                                            fsizes=uploads)
        if requests:
            score['requests'] = self.requests(proto=proto, req_count=req_count)
        self.info("\n")
        return score

    def fmt_ms(self, tval):
        """Format a duration in seconds as integer milliseconds ('--' if < 0)."""
        return f'{int(tval*1000)} ms' if tval >= 0 else '--'

    def fmt_size(self, val):
        """Format a byte count with a GB/MB/KB/B unit (integer precision)."""
        if val >= (1024*1024*1024):
            return f'{val / (1024*1024*1024):0.000f}GB'
        elif val >= (1024 * 1024):
            return f'{val / (1024*1024):0.000f}MB'
        elif val >= 1024:
            return f'{val / 1024:0.000f}KB'
        else:
            return f'{val:0.000f}B'

    def fmt_mbs(self, val):
        """Format a bytes/s rate as MB/s ('--' if < 0)."""
        return f'{val/(1024*1024):0.000f} MB/s' if val >= 0 else '--'

    def fmt_reqs(self, val):
        """Format a requests/s rate ('--' if < 0)."""
        return f'{val:0.000f} r/s' if val >= 0 else '--'

    def print_score(self, score):
        """Pretty-print a score dict as aligned text tables on stdout."""
        print(f'{score["protocol"]["name"].upper()} in {score["curl"]}')
        if 'handshakes' in score:
            print(f'{"Handshakes":<24} {"ipv4":25} {"ipv6":28}')
            print(f' {"Host":<17} {"Connect":>12} {"Handshake":>12} '
                  f'{"Connect":>12} {"Handshake":>12} {"Errors":<20}')
            for key, val in score["handshakes"].items():
                print(f' {key:<17} {self.fmt_ms(val["ipv4-connect"]):>12} '
                      f'{self.fmt_ms(val["ipv4-handshake"]):>12} '
                      f'{self.fmt_ms(val["ipv6-connect"]):>12} '
                      f'{self.fmt_ms(val["ipv6-handshake"]):>12} '
                      f'{"/".join(val["ipv4-errors"] + val["ipv6-errors"]):<20}'
                      )
        if 'downloads' in score:
            # get the key names of all sizes and measurements made
            sizes = []
            measures = []
            m_names = {}
            mcol_width = 12
            mcol_sw = 17
            for server, server_score in score['downloads'].items():
                for sskey, ssval in server_score.items():
                    if isinstance(ssval, str):
                        # 'description' entry, not a size
                        continue
                    if sskey not in sizes:
                        sizes.append(sskey)
                    for mkey, mval in server_score[sskey].items():
                        if mkey not in measures:
                            measures.append(mkey)
                            m_names[mkey] = f'{mkey}({mval["count"]}x{mval["max-parallel"]})'

            print('Downloads')
            print(f' {"Server":<8} {"Size":>8}', end='')
            for m in measures:
                print(f' {m_names[m]:>{mcol_width}} {"[cpu/rss]":<{mcol_sw}}', end='')
            print(f' {"Errors":^20}')

            for server in score['downloads']:
                for size in sizes:
                    size_score = score['downloads'][server][size]
                    print(f' {server:<8} {size:>8}', end='')
                    errors = []
                    for key, val in size_score.items():
                        if 'errors' in val:
                            errors.extend(val['errors'])
                    for m in measures:
                        if m in size_score:
                            print(f' {self.fmt_mbs(size_score[m]["speed"]):>{mcol_width}}', end='')
                            stats = size_score[m]["stats"]
                            # stats may be empty when every sample failed
                            if 'cpu' in stats:
                                s = f'[{stats["cpu"]:>.1f}%/{self.fmt_size(stats["rss"])}]'
                            else:
                                s = '[???/???]'
                            print(f' {s:<{mcol_sw}}', end='')
                        else:
                            print(' '*mcol_width, end='')
                    if len(errors):
                        print(f' {"/".join(errors):<20}')
                    else:
                        print(f' {"-":^20}')

        if 'uploads' in score:
            # get the key names of all sizes and measurements made
            sizes = []
            measures = []
            m_names = {}
            mcol_width = 12
            mcol_sw = 17
            for server, server_score in score['uploads'].items():
                for sskey, ssval in server_score.items():
                    if isinstance(ssval, str):
                        continue
                    if sskey not in sizes:
                        sizes.append(sskey)
                    for mkey, mval in server_score[sskey].items():
                        if mkey not in measures:
                            measures.append(mkey)
                            m_names[mkey] = f'{mkey}({mval["count"]}x{mval["max-parallel"]})'

            print('Uploads')
            print(f' {"Server":<8} {"Size":>8}', end='')
            for m in measures:
                print(f' {m_names[m]:>{mcol_width}} {"[cpu/rss]":<{mcol_sw}}', end='')
            print(f' {"Errors":^20}')

            for server in score['uploads']:
                for size in sizes:
                    size_score = score['uploads'][server][size]
                    print(f' {server:<8} {size:>8}', end='')
                    errors = []
                    for key, val in size_score.items():
                        if 'errors' in val:
                            errors.extend(val['errors'])
                    for m in measures:
                        if m in size_score:
                            print(f' {self.fmt_mbs(size_score[m]["speed"]):>{mcol_width}}', end='')
                            stats = size_score[m]["stats"]
                            if 'cpu' in stats:
                                s = f'[{stats["cpu"]:>.1f}%/{self.fmt_size(stats["rss"])}]'
                            else:
                                s = '[???/???]'
                            print(f' {s:<{mcol_sw}}', end='')
                        else:
                            print(' '*mcol_width, end='')
                    if len(errors):
                        print(f' {"/".join(errors):<20}')
                    else:
                        print(f' {"-":^20}')

        if 'requests' in score:
            sizes = []
            measures = []
            m_names = {}
            mcol_width = 9
            mcol_sw = 13
            for server in score['requests']:
                server_score = score['requests'][server]
                for sskey, ssval in server_score.items():
                    if isinstance(ssval, str) or isinstance(ssval, int):
                        # 'description' and 'count' entries, not sizes
                        continue
                    if sskey not in sizes:
                        sizes.append(sskey)
                    for mkey, mval in server_score[sskey].items():
                        if mkey not in measures:
                            measures.append(mkey)
                            m_names[mkey] = f'{mkey}'

            print('Requests, max in parallel')
            print(f' {"Server":<8} {"Size":>6} {"Reqs":>6}', end='')
            for m in measures:
                print(f' {m_names[m]:>{mcol_width}} {"[cpu/rss]":<{mcol_sw}}', end='')
            print(f' {"Errors":^10}')

            for server in score['requests']:
                for size in sizes:
                    size_score = score['requests'][server][size]
                    count = score['requests'][server]['count']
                    print(f' {server:<8} {size:>6} {count:>6}', end='')
                    errors = []
                    for key, val in size_score.items():
                        if 'errors' in val:
                            errors.extend(val['errors'])
                    for m in measures:
                        if m in size_score:
                            print(f' {self.fmt_reqs(size_score[m]["speed"]):>{mcol_width}}', end='')
                            stats = size_score[m]["stats"]
                            if 'cpu' in stats:
                                s = f'[{stats["cpu"]:>.1f}%/{self.fmt_size(stats["rss"])}]'
                            else:
                                s = '[???/???]'
                            print(f' {s:<{mcol_sw}}', end='')
                        else:
                            print(' '*mcol_width, end='')
                    if len(errors):
                        print(f' {"/".join(errors):<10}')
                    else:
                        print(f' {"-":^10}')


def parse_size(s):
    """Parse a size string like '100', '10kb', '5MB' or '1gb' into bytes.

    Raises Exception for anything that is not a plain integer with an
    optional (case-insensitive) kb/mb/gb suffix.
    """
    # fullmatch so trailing garbage (e.g. '10xb') is rejected instead of
    # being silently truncated to the leading digits
    m = re.fullmatch(r'(\d+)(mb|kb|gb)?', s, re.IGNORECASE)
    if m is None:
        raise Exception(f'unrecognized size: {s}')
    size = int(m.group(1))
    if not m.group(2):
        pass
    elif m.group(2).lower() == 'kb':
        size *= 1024
    elif m.group(2).lower() == 'mb':
        size *= 1024 * 1024
    elif m.group(2).lower() == 'gb':
        size *= 1024 * 1024 * 1024
    return size


def main():
    parser = argparse.ArgumentParser(prog='scorecard', description="""
        Run a range of tests to give a scorecard for a HTTP protocol
        'h3' or 'h2' implementation in curl.
        """)
    parser.add_argument("-v", "--verbose", action='count', default=1,
                        help="log more output on stderr")
    parser.add_argument("-j", "--json", action='store_true',
                        default=False, help="print json instead of text")
    parser.add_argument("-H", "--handshakes", action='store_true',
                        default=False, help="evaluate handshakes only")
    parser.add_argument("-d", "--downloads", action='store_true',
                        default=False, help="evaluate downloads")
    parser.add_argument("--download", action='append', type=str,
                        default=None, help="evaluate download size")
    parser.add_argument("--download-count", action='store', type=int,
                        default=50, help="perform that many downloads")
    parser.add_argument("--download-parallel", action='store', type=int,
                        default=0, help="perform that many downloads in parallel (default all)")
    parser.add_argument("-u", "--uploads", action='store_true',
                        default=False, help="evaluate uploads")
    parser.add_argument("--upload", action='append', type=str,
                        default=None, help="evaluate upload size")
    parser.add_argument("--upload-count", action='store', type=int,
                        default=50, help="perform that many uploads")
    parser.add_argument("-r", "--requests", action='store_true',
                        default=False, help="evaluate requests")
    parser.add_argument("--request-count", action='store', type=int,
                        default=5000, help="perform that many requests")
    parser.add_argument("--httpd", action='store_true', default=False,
                        help="evaluate httpd server only")
    parser.add_argument("--caddy", action='store_true', default=False,
                        help="evaluate caddy server only")
    parser.add_argument("--curl-verbose", action='store_true',
                        default=False, help="run curl with `-v`")
    parser.add_argument("protocol", default='h2', nargs='?',
                        help="Name of protocol to score")
    args = parser.parse_args()

    if args.verbose > 0:
        console = logging.StreamHandler()
        console.setLevel(logging.INFO)
        console.setFormatter(logging.Formatter(logging.BASIC_FORMAT))
        logging.getLogger('').addHandler(console)

    protocol = args.protocol
    handshakes = True
    downloads = [1024 * 1024, 10 * 1024 * 1024, 100 * 1024 * 1024]
    if args.download is not None:
        downloads = []
        for x in args.download:
            downloads.extend([parse_size(s) for s in x.split(',')])

    uploads = [1024 * 1024, 10 * 1024 * 1024, 100 * 1024 * 1024]
    if args.upload is not None:
        uploads = []
        for x in args.upload:
            uploads.extend([parse_size(s) for s in x.split(',')])

    requests = True
    # any explicit selection disables everything not selected
    if args.downloads or args.uploads or args.requests or args.handshakes:
        handshakes = args.handshakes
        if not args.downloads:
            downloads = None
        if not args.uploads:
            uploads = None
        requests = args.requests

    test_httpd = protocol != 'h3'
    test_caddy = True
    if args.caddy or args.httpd:
        test_caddy = args.caddy
        test_httpd = args.httpd

    rv = 0
    env = Env()
    env.setup()
    env.test_timeout = None
    httpd = None
    nghttpx = None
    caddy = None
    try:
        # caddy uploads also need httpd (the PUT handler lives there)
        if test_httpd or (test_caddy and uploads):
            print(f'httpd: {env.httpd_version()}, http:{env.http_port} https:{env.https_port}')
            httpd = Httpd(env=env)
            assert httpd.exists(), \
                f'httpd not found: {env.httpd}'
            httpd.clear_logs()
            assert httpd.start()
            if test_httpd and 'h3' == protocol:
                nghttpx = NghttpxQuic(env=env)
                nghttpx.clear_logs()
                assert nghttpx.start()
        if test_caddy and env.caddy:
            print(f'Caddy: {env.caddy_version()}, http:{env.caddy_http_port} https:{env.caddy_https_port}')
            caddy = Caddy(env=env)
            caddy.clear_logs()
            assert caddy.start()

        card = ScoreCard(env=env, httpd=httpd if test_httpd else None,
                         nghttpx=nghttpx, caddy=caddy if test_caddy else None,
                         verbose=args.verbose, curl_verbose=args.curl_verbose,
                         download_parallel=args.download_parallel)
        score = card.score_proto(proto=protocol,
                                 handshakes=handshakes,
                                 downloads=downloads,
                                 download_count=args.download_count,
                                 uploads=uploads,
                                 upload_count=args.upload_count,
                                 req_count=args.request_count,
                                 requests=requests)
        if args.json:
            print(json.dumps(score, indent=2))
        else:
            card.print_score(score)

    except ScoreCardException as ex:
        sys.stderr.write(f"ERROR: {str(ex)}\n")
        rv = 1
    except KeyboardInterrupt:
        log.warning("aborted")
        rv = 1
    finally:
        # stop servers in reverse dependency order
        if caddy:
            caddy.stop()
        if nghttpx:
            nghttpx.stop(wait_dead=False)
        if httpd:
            httpd.stop()
    sys.exit(rv)


if __name__ == "__main__":
    main()