GitHub Repository: freebsd/pkg
Path: blob/main/external/curl/tests/http/testenv/curl.py
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#***************************************************************************
#                                  _   _ ____  _
#  Project                     ___| | | |  _ \| |
#                             / __| | | | |_) | |
#                            | (__| |_| |  _ <| |___
#                             \___|\___/|_| \_\_____|
#
# Copyright (C) Daniel Stenberg, <daniel@haxx.se>, et al.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution. The terms
# are also available at https://curl.se/docs/copyright.html.
#
# You may opt to use, copy, modify, merge, publish, distribute and/or sell
# copies of the Software, and permit persons to whom the Software is
# furnished to do so, under the terms of the COPYING file.
#
# This software is distributed on an "AS IS" basis, WITHOUT WARRANTY OF ANY
# KIND, either express or implied.
#
# SPDX-License-Identifier: curl
#
###########################################################################
#
import json
import logging
import os
import sys
import time
from threading import Thread

import psutil
import re
import shutil
import subprocess
from statistics import mean, fmean
from datetime import timedelta, datetime
from typing import List, Optional, Dict, Union, Any
from urllib.parse import urlparse

from .env import Env


log = logging.getLogger(__name__)


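# Note (illustrative, not from the upstream sources): RunProfile samples a
# running curl process via psutil and, on finish(), reduces the samples to
# time-weighted averages of the STAT_KEYS below. A minimal usage sketch,
# assuming `pid` is a live process id and `run_dir` an existing directory:
#
#   profile = RunProfile(pid, started_at=datetime.now(), run_dir=run_dir)
#   while process_still_running(pid):   # hypothetical helper
#       profile.sample()
#   profile.finish()
#   log.info('averages: %s', profile.stats)   # {'cpu': ..., 'rss': ..., 'vsz': ...}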
class RunProfile:

    STAT_KEYS = ['cpu', 'rss', 'vsz']

    @classmethod
    def AverageStats(cls, profiles: List['RunProfile']):
        avg = {}
        stats = [p.stats for p in profiles]
        for key in cls.STAT_KEYS:
            vals = [s[key] for s in stats]
            avg[key] = mean(vals) if len(vals) else 0.0
        return avg

    def __init__(self, pid: int, started_at: datetime, run_dir):
        self._pid = pid
        self._started_at = started_at
        self._duration = timedelta(seconds=0)
        self._run_dir = run_dir
        self._samples = []
        self._psu = None
        self._stats = None

    @property
    def duration(self) -> timedelta:
        return self._duration

    @property
    def stats(self) -> Optional[Dict[str, Any]]:
        return self._stats

    def sample(self):
        elapsed = datetime.now() - self._started_at
        try:
            if self._psu is None:
                self._psu = psutil.Process(pid=self._pid)
            mem = self._psu.memory_info()
            self._samples.append({
                'time': elapsed,
                'cpu': self._psu.cpu_percent(),
                'vsz': mem.vms,
                'rss': mem.rss,
            })
        except psutil.NoSuchProcess:
            pass

    def finish(self):
        self._duration = datetime.now() - self._started_at
        if len(self._samples) > 0:
            weights = [s['time'].total_seconds() for s in self._samples]
            self._stats = {}
            for key in self.STAT_KEYS:
                self._stats[key] = fmean([s[key] for s in self._samples], weights)
        else:
            self._stats = None
        self._psu = None

    def __repr__(self):
        return f'RunProfile[pid={self._pid}, '\
               f'duration={self.duration.total_seconds():.3f}s, '\
               f'stats={self.stats}]'


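# Note (illustrative, not from the upstream sources): PerfProfile wraps Linux
# `perf record` around a given pid (start()) and converts the recording with
# `perf script` into self.file (finish()). Both steps run via sudo, so the test
# environment must allow password-less sudo for perf. Roughly equivalent shell,
# for a pid $PID:
#
#   sudo perf record -F 99 -p $PID -g -- sleep 60
#   sudo perf script > curl.perf_stacks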
class PerfProfile:

    def __init__(self, pid: int, run_dir):
        self._pid = pid
        self._run_dir = run_dir
        self._proc = None
        self._rc = 0
        self._file = os.path.join(self._run_dir, 'curl.perf_stacks')

    def start(self):
        if os.path.exists(self._file):
            os.remove(self._file)
        args = [
            'sudo', 'perf', 'record', '-F', '99', '-p', f'{self._pid}',
            '-g', '--', 'sleep', '60'
        ]
        self._proc = subprocess.Popen(args, text=True, cwd=self._run_dir, shell=False)
        assert self._proc

    def finish(self):
        if self._proc:
            self._proc.terminate()
            self._rc = self._proc.returncode
        with open(self._file, 'w') as cout:
            p = subprocess.run([
                'sudo', 'perf', 'script'
            ], stdout=cout, cwd=self._run_dir, shell=False)
            rc = p.returncode
            if rc != 0:
                raise Exception(f'perf returned error {rc}')

    @property
    def file(self):
        return self._file


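# Note (illustrative, not from the upstream sources): DTraceProfile is the
# macOS/BSD counterpart to PerfProfile. It samples user stacks of one pid at
# 97 Hz for 60 seconds and writes the aggregated stacks to self.file. For
# pid 1234 the generated dtrace program is effectively:
#
#   profile-97 /pid == 1234/ { @[ustack()] = count(); } tick-60s { exit(0); }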
class DTraceProfile:

    def __init__(self, pid: int, run_dir):
        self._pid = pid
        self._run_dir = run_dir
        self._proc = None
        self._rc = 0
        self._file = os.path.join(self._run_dir, 'curl.dtrace_stacks')

    def start(self):
        if os.path.exists(self._file):
            os.remove(self._file)
        args = [
            'sudo', 'dtrace',
            '-x', 'ustackframes=100',
            '-n', f'profile-97 /pid == {self._pid}/ {{ @[ustack()] = count(); }} tick-60s {{ exit(0); }}',
            '-o', f'{self._file}'
        ]
        if sys.platform.startswith('darwin'):
            # macOS seems to like this for producing symbols in user stacks
            args.extend(['-p', f'{self._pid}'])
        self._proc = subprocess.Popen(args, text=True, cwd=self._run_dir, shell=False)
        assert self._proc

    def finish(self):
        if self._proc:
            self._proc.terminate()
            self._rc = self._proc.returncode

    @property
    def file(self):
        return self._file


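# Note (illustrative, not from the upstream sources): RunTcpDump captures TCP
# RST packets on the loopback interface while a test runs; leftover RSTs
# usually mean connections were not shut down cleanly. A usage sketch, with
# the port filter purely illustrative:
#
#   td = RunTcpDump(env, run_dir)
#   td.start()                         # spawns tcpdump in a background thread
#   ...run the client under test...
#   td.finish()
#   assert len(td.get_rsts(ports=[some_server_port])) == 0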
class RunTcpDump:

    def __init__(self, env, run_dir):
        self._env = env
        self._run_dir = run_dir
        self._proc = None
        self._stdoutfile = os.path.join(self._run_dir, 'tcpdump.out')
        self._stderrfile = os.path.join(self._run_dir, 'tcpdump.err')

    def get_rsts(self, ports: List[int]|None = None) -> Optional[List[str]]:
        if self._proc:
            raise Exception('tcpdump still running')
        lines = []
        for line in open(self._stdoutfile):
            m = re.match(r'.* IP 127\.0\.0\.1\.(\d+) [<>] 127\.0\.0\.1\.(\d+):.*', line)
            if m:
                sport = int(m.group(1))
                dport = int(m.group(2))
                if ports is None or sport in ports or dport in ports:
                    lines.append(line)
        return lines

    @property
    def stats(self) -> Optional[List[str]]:
        return self.get_rsts()

    @property
    def stderr(self) -> List[str]:
        if self._proc:
            raise Exception('tcpdump still running')
        return open(self._stderrfile).readlines()

    def sample(self):
        # not sure how to make that detection reliable for all platforms
        local_if = 'lo0' if sys.platform.startswith('darwin') else 'lo'
        try:
            tcpdump = self._env.tcpdump()
            if tcpdump is None:
                raise Exception('tcpdump not available')
            # look with tcpdump for TCP RST packets which indicate
            # we did not shut down connections cleanly
            args = []
            # at least on Linux, we need root permissions to run tcpdump
            if sys.platform.startswith('linux'):
                args.append('sudo')
            args.extend([
                tcpdump, '-i', local_if, '-n', 'tcp[tcpflags] & (tcp-rst)!=0'
            ])
            with open(self._stdoutfile, 'w') as cout, open(self._stderrfile, 'w') as cerr:
                self._proc = subprocess.Popen(args, stdout=cout, stderr=cerr,
                                              text=True, cwd=self._run_dir,
                                              shell=False)
                assert self._proc
                assert self._proc.returncode is None
                while self._proc:
                    try:
                        self._proc.wait(timeout=1)
                    except subprocess.TimeoutExpired:
                        pass
        except Exception:
            log.exception('Tcpdump')

    def start(self):
        def do_sample():
            self.sample()
        t = Thread(target=do_sample)
        t.start()

    def finish(self):
        if self._proc:
            time.sleep(1)
            self._proc.terminate()
            self._proc = None


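# Note (illustrative, not from the upstream sources): ExecResult is the value
# object returned by CurlClient runs. It carries the raw argv, exit code,
# captured stdout/stderr, parsed responses and statistics, the optional
# RunProfile/RunTcpDump, and the check_*() assertion helpers used by the tests.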
class ExecResult:

    def __init__(self, args: List[str], exit_code: int,
                 stdout: List[str], stderr: List[str],
                 duration: Optional[timedelta] = None,
                 with_stats: bool = False,
                 exception: Optional[str] = None,
                 profile: Optional[RunProfile] = None,
                 tcpdump: Optional[RunTcpDump] = None):
        self._args = args
        self._exit_code = exit_code
        self._exception = exception
        self._stdout = stdout
        self._stderr = stderr
        self._profile = profile
        self._tcpdump = tcpdump
        self._duration = duration if duration is not None else timedelta()
        self._response = None
        self._responses = []
        self._results = {}
        self._assets = []
        self._stats = []
        self._json_out = None
        self._with_stats = with_stats
        if with_stats:
            self._parse_stats()
        else:
            # noinspection PyBroadException
            try:
                out = ''.join(self._stdout)
                self._json_out = json.loads(out)
            except:  # noqa: E722
                pass

    def __repr__(self):
        return f"ExecResult[code={self.exit_code}, exception={self._exception}, "\
               f"args={self._args}, stdout={self._stdout}, stderr={self._stderr}]"

    def _parse_stats(self):
        self._stats = []
        for line in self._stdout:
            try:
                self._stats.append(json.loads(line))
            # TODO: specify specific exceptions here
            except:  # noqa: E722
                log.exception(f'not a JSON stat: {line}')
                break

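    # Note (illustrative, not from the upstream sources): with_stats=True
    # expects one JSON object per line on stdout, as produced by curl's
    # `-w '%{json}\n'` write-out. Each parsed entry is a plain dict; the keys
    # used by the checks below include, for example (abridged):
    #
    #   {'http_code': 200, 'http_version': '2', 'exitcode': 0,
    #    'num_connects': 1, 'remote_ip': '127.0.0.1', 'remote_port': 443, ...}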
    @property
    def exit_code(self) -> int:
        return self._exit_code

    @property
    def args(self) -> List[str]:
        return self._args

    @property
    def outraw(self) -> bytes:
        return ''.join(self._stdout).encode()

    @property
    def stdout(self) -> str:
        return ''.join(self._stdout)

    @property
    def json(self) -> Optional[Dict]:
        """Output as JSON dictionary or None if not parseable."""
        return self._json_out

    @property
    def stderr(self) -> str:
        return ''.join(self._stderr)

    @property
    def trace_lines(self) -> List[str]:
        return self._stderr

    @property
    def duration(self) -> timedelta:
        return self._duration

    @property
    def profile(self) -> Optional[RunProfile]:
        return self._profile

    @property
    def tcpdump(self) -> Optional[RunTcpDump]:
        return self._tcpdump

    @property
    def response(self) -> Optional[Dict]:
        return self._response

    @property
    def responses(self) -> List[Dict]:
        return self._responses

    @property
    def results(self) -> Dict:
        return self._results

    @property
    def assets(self) -> List:
        return self._assets

    @property
    def with_stats(self) -> bool:
        return self._with_stats

    @property
    def stats(self) -> List:
        return self._stats

    @property
    def total_connects(self) -> Optional[int]:
        if len(self.stats):
            n = 0
            for stat in self.stats:
                n += stat['num_connects']
            return n
        return None

    def add_response(self, resp: Dict):
        self._response = resp
        self._responses.append(resp)

    def add_results(self, results: Dict):
        self._results.update(results)
        if 'response' in results:
            self.add_response(results['response'])

    def add_assets(self, assets: List):
        self._assets.extend(assets)

    def check_exit_code(self, code: Union[int, bool]):
        if code is True:
            assert self.exit_code == 0, f'expected exit code {code}, '\
                f'got {self.exit_code}\n{self.dump_logs()}'
        elif code is False:
            assert self.exit_code != 0, f'expected exit code {code}, '\
                f'got {self.exit_code}\n{self.dump_logs()}'
        else:
            assert self.exit_code == code, f'expected exit code {code}, '\
                f'got {self.exit_code}\n{self.dump_logs()}'

    def check_response(self, http_status: Optional[int] = 200,
                       count: Optional[int] = 1,
                       protocol: Optional[str] = None,
                       exitcode: Optional[int] = 0,
                       connect_count: Optional[int] = None):
        if exitcode:
            self.check_exit_code(exitcode)
            if self.with_stats and isinstance(exitcode, int):
                for idx, x in enumerate(self.stats):
                    if 'exitcode' in x:
                        assert int(x['exitcode']) == exitcode, \
                            f'response #{idx} exitcode: expected {exitcode}, '\
                            f'got {x["exitcode"]}\n{self.dump_logs()}'

        if self.with_stats:
            assert len(self.stats) == count, \
                f'response count: expected {count}, ' \
                f'got {len(self.stats)}\n{self.dump_logs()}'
        else:
            assert len(self.responses) == count, \
                f'response count: expected {count}, ' \
                f'got {len(self.responses)}\n{self.dump_logs()}'
        if http_status is not None:
            if self.with_stats:
                for idx, x in enumerate(self.stats):
                    assert 'http_code' in x, \
                        f'response #{idx} reports no http_code\n{self.dump_stat(x)}'
                    assert x['http_code'] == http_status, \
                        f'response #{idx} http_code: expected {http_status}, '\
                        f'got {x["http_code"]}\n{self.dump_stat(x)}'
            else:
                for idx, x in enumerate(self.responses):
                    assert x['status'] == http_status, \
                        f'response #{idx} status: expected {http_status}, '\
                        f'got {x["status"]}\n{self.dump_stat(x)}'
        if protocol is not None:
            if self.with_stats:
                http_version = None
                if protocol == 'HTTP/1.1':
                    http_version = '1.1'
                elif protocol == 'HTTP/2':
                    http_version = '2'
                elif protocol == 'HTTP/3':
                    http_version = '3'
                if http_version is not None:
                    for idx, x in enumerate(self.stats):
                        assert x['http_version'] == http_version, \
                            f'response #{idx} protocol: expected http/{http_version}, ' \
                            f'got version {x["http_version"]}\n{self.dump_stat(x)}'
            else:
                for idx, x in enumerate(self.responses):
                    assert x['protocol'] == protocol, \
                        f'response #{idx} protocol: expected {protocol}, '\
                        f'got {x["protocol"]}\n{self.dump_logs()}'
        if connect_count is not None:
            assert self.total_connects == connect_count, \
                f'expected {connect_count}, but {self.total_connects} '\
                f'were made\n{self.dump_logs()}'

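    # Usage sketch (illustrative, not from the upstream sources), assuming
    # `client` is a CurlClient and `url` points at one of the test servers:
    #
    #   r = client.http_download(urls=[url], alpn_proto='h2')
    #   r.check_response(count=1, http_status=200, protocol='HTTP/2')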
    def check_stats(self, count: int, http_status: Optional[int] = None,
                    exitcode: Optional[Union[int, List[int]]] = None,
                    remote_port: Optional[int] = None,
                    remote_ip: Optional[str] = None):
        if exitcode is None:
            self.check_exit_code(0)
        elif isinstance(exitcode, int):
            exitcode = [exitcode]
        assert len(self.stats) == count, \
            f'stats count: expected {count}, got {len(self.stats)}\n{self.dump_logs()}'
        if http_status is not None:
            for idx, x in enumerate(self.stats):
                assert 'http_code' in x, \
                    f'status #{idx} reports no http_code\n{self.dump_stat(x)}'
                assert x['http_code'] == http_status, \
                    f'status #{idx} http_code: expected {http_status}, '\
                    f'got {x["http_code"]}\n{self.dump_stat(x)}'
        if exitcode is not None:
            for idx, x in enumerate(self.stats):
                if 'exitcode' in x:
                    assert x['exitcode'] in exitcode, \
                        f'status #{idx} exitcode: expected {exitcode}, '\
                        f'got {x["exitcode"]}\n{self.dump_stat(x)}'
        if remote_port is not None:
            for idx, x in enumerate(self.stats):
                assert 'remote_port' in x, f'remote_port missing\n{self.dump_stat(x)}'
                assert x['remote_port'] == remote_port, \
                    f'status #{idx} remote_port: expected {remote_port}, '\
                    f'got {x["remote_port"]}\n{self.dump_stat(x)}'
        if remote_ip is not None:
            for idx, x in enumerate(self.stats):
                assert 'remote_ip' in x, f'remote_ip missing\n{self.dump_stat(x)}'
                assert x['remote_ip'] == remote_ip, \
                    f'status #{idx} remote_ip: expected {remote_ip}, '\
                    f'got {x["remote_ip"]}\n{self.dump_stat(x)}'

    def dump_logs(self):
        lines = ['>>--stdout ----------------------------------------------\n']
        lines.extend(self._stdout)
        lines.append('>>--stderr ----------------------------------------------\n')
        lines.extend(self._stderr)
        lines.append('<<-------------------------------------------------------\n')
        return ''.join(lines)

    def dump_stat(self, x):
        lines = [
            'json stat from curl:',
            json.JSONEncoder(indent=2).encode(x),
        ]
        if 'xfer_id' in x:
            xfer_id = x['xfer_id']
            lines.append(f'>>--xfer {xfer_id} trace:\n')
            lines.extend(self.xfer_trace_for(xfer_id))
        else:
            lines.append('>>--full trace-------------------------------------------\n')
            lines.extend(self._stderr)
        lines.append('<<-------------------------------------------------------\n')
        return ''.join(lines)

    def xfer_trace_for(self, xfer_id) -> List[str]:
        pat = re.compile(f'^[^[]* \\[{xfer_id}-.*$')
        return [line for line in self._stderr if pat.match(line)]


class CurlClient:

    ALPN_ARG = {
        'http/0.9': '--http0.9',
        'http/1.0': '--http1.0',
        'http/1.1': '--http1.1',
        'h2': '--http2',
        'h2c': '--http2',
        'h3': '--http3-only',
    }

    def __init__(self, env: Env,
                 run_dir: Optional[str] = None,
                 timeout: Optional[float] = None,
                 silent: bool = False,
                 run_env: Optional[Dict[str, str]] = None,
                 server_addr: Optional[str] = None,
                 with_dtrace: bool = False,
                 with_perf: bool = False,
                 with_flame: bool = False,
                 socks_args: Optional[List[str]] = None):
        self.env = env
        self._timeout = timeout if timeout else env.test_timeout
        self._curl = os.environ['CURL'] if 'CURL' in os.environ else env.curl
        self._run_dir = run_dir if run_dir else os.path.join(env.gen_dir, 'curl')
        self._stdoutfile = f'{self._run_dir}/curl.stdout'
        self._stderrfile = f'{self._run_dir}/curl.stderr'
        self._headerfile = f'{self._run_dir}/curl.headers'
        self._log_path = f'{self._run_dir}/curl.log'
        self._with_dtrace = with_dtrace
        self._with_perf = with_perf
        self._with_flame = with_flame
        self._fg_dir = None
        if self._with_flame:
            self._fg_dir = os.path.join(self.env.project_dir, '../FlameGraph')
            if 'FLAMEGRAPH' in os.environ:
                self._fg_dir = os.environ['FLAMEGRAPH']
            if not os.path.exists(self._fg_dir):
                raise Exception(f'FlameGraph checkout not found in {self._fg_dir}, set env variable FLAMEGRAPH')
            if sys.platform.startswith('linux'):
                self._with_perf = True
            elif sys.platform.startswith('darwin'):
                self._with_dtrace = True
            else:
                raise Exception(f'flame graphs unsupported on {sys.platform}')
        self._socks_args = socks_args
        self._silent = silent
        self._run_env = run_env
        self._server_addr = server_addr if server_addr else '127.0.0.1'
        self._rmrf(self._run_dir)
        self._mkpath(self._run_dir)

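    # Usage sketch (illustrative, not from the upstream sources): a client is
    # usually created per test case with just the Env fixture; everything else
    # has defaults. `env.domain1` and `env.https_port` are only illustrative
    # attribute names here:
    #
    #   curl = CurlClient(env=env)
    #   r = curl.http_get(url=f'https://{env.domain1}:{env.https_port}/data.json')
    #   r.check_response(http_status=200)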
    @property
    def run_dir(self) -> str:
        return self._run_dir

    def download_file(self, i: int) -> str:
        return os.path.join(self.run_dir, f'download_{i}.data')

    def _rmf(self, path):
        if os.path.exists(path):
            return os.remove(path)

    def _rmrf(self, path):
        if os.path.exists(path):
            return shutil.rmtree(path)

    def _mkpath(self, path):
        if not os.path.exists(path):
            return os.makedirs(path)

    def get_proxy_args(self, proto: str = 'http/1.1',
                       proxys: bool = True, tunnel: bool = False,
                       use_ip: bool = False):
        proxy_name = self._server_addr if use_ip else self.env.proxy_domain
        if proxys:
            pport = self.env.pts_port(proto) if tunnel else self.env.proxys_port
            xargs = [
                '--proxy', f'https://{proxy_name}:{pport}/',
                '--resolve', f'{proxy_name}:{pport}:{self._server_addr}',
                '--proxy-cacert', self.env.ca.cert_file,
            ]
            if proto == 'h2':
                xargs.append('--proxy-http2')
        else:
            xargs = [
                '--proxy', f'http://{proxy_name}:{self.env.proxy_port}/',
                '--resolve', f'{proxy_name}:{self.env.proxy_port}:{self._server_addr}',
            ]
        if tunnel:
            xargs.append('--proxytunnel')
        return xargs

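    # Note (illustrative, not from the upstream sources): for the default
    # (proxys=True, no tunnel) case, get_proxy_args() yields argument pairs
    # along the lines of
    #   --proxy https://<proxy_name>:<proxys_port>/
    #   --resolve <proxy_name>:<proxys_port>:<server_addr>
    #   --proxy-cacert <test CA file>
    # so the proxy name resolves to the local test server and its TLS cert
    # verifies against the suite's own CA.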
    def http_get(self, url: str, extra_args: Optional[List[str]] = None,
                 alpn_proto: Optional[str] = None,
                 def_tracing: bool = True,
                 with_stats: bool = False,
                 with_profile: bool = False,
                 with_tcpdump: bool = False):
        return self._raw(url, options=extra_args,
                         with_stats=with_stats,
                         alpn_proto=alpn_proto,
                         def_tracing=def_tracing,
                         with_profile=with_profile,
                         with_tcpdump=with_tcpdump)

    def http_download(self, urls: List[str],
                      alpn_proto: Optional[str] = None,
                      with_stats: bool = True,
                      with_headers: bool = False,
                      with_profile: bool = False,
                      with_tcpdump: bool = False,
                      no_save: bool = False,
                      limit_rate: Optional[str] = None,
                      extra_args: Optional[List[str]] = None):
        if extra_args is None:
            extra_args = []
        if no_save:
            extra_args.extend(['--out-null'])
        else:
            extra_args.extend(['-o', 'download_#1.data'])
        if limit_rate:
            extra_args.extend(['--limit-rate', limit_rate])
        # remove any existing ones
        for i in range(100):
            self._rmf(self.download_file(i))
        if with_stats:
            extra_args.extend([
                '-w', '%{json}\\n'
            ])
        return self._raw(urls, alpn_proto=alpn_proto, options=extra_args,
                         with_stats=with_stats,
                         with_headers=with_headers,
                         with_profile=with_profile,
                         with_tcpdump=with_tcpdump)

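    # Usage sketch (illustrative, not from the upstream sources): downloading
    # several URLs in one curl invocation, keeping per-transfer statistics but
    # discarding the bodies; the URL pattern uses curl's [0-9] globbing:
    #
    #   urls = [f'https://{domain}:{port}/data-100k?[0-9]']
    #   r = curl.http_download(urls=urls, alpn_proto='h3', no_save=True)
    #   r.check_response(count=10, http_status=200)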
    def http_upload(self, urls: List[str], data: str,
                    alpn_proto: Optional[str] = None,
                    with_stats: bool = True,
                    with_headers: bool = False,
                    with_profile: bool = False,
                    with_tcpdump: bool = False,
                    extra_args: Optional[List[str]] = None):
        if extra_args is None:
            extra_args = []
        extra_args.extend([
            '--data-binary', data, '-o', 'download_#1.data',
        ])
        if with_stats:
            extra_args.extend([
                '-w', '%{json}\\n'
            ])
        return self._raw(urls, alpn_proto=alpn_proto, options=extra_args,
                         with_stats=with_stats,
                         with_headers=with_headers,
                         with_profile=with_profile,
                         with_tcpdump=with_tcpdump)

    def http_delete(self, urls: List[str],
                    alpn_proto: Optional[str] = None,
                    with_stats: bool = True,
                    with_profile: bool = False,
                    extra_args: Optional[List[str]] = None):
        if extra_args is None:
            extra_args = []
        extra_args.extend([
            '-X', 'DELETE', '--out-null',
        ])
        if with_stats:
            extra_args.extend([
                '-w', '%{json}\\n'
            ])
        return self._raw(urls, alpn_proto=alpn_proto, options=extra_args,
                         with_stats=with_stats,
                         with_headers=False,
                         with_profile=with_profile)

    def http_put(self, urls: List[str], data=None, fdata=None,
                 alpn_proto: Optional[str] = None,
                 with_stats: bool = True,
                 with_headers: bool = False,
                 with_profile: bool = False,
                 extra_args: Optional[List[str]] = None):
        if extra_args is None:
            extra_args = []
        if fdata is not None:
            extra_args.extend(['-T', fdata])
        elif data is not None:
            extra_args.extend(['-T', '-'])
        extra_args.extend([
            '-o', 'download_#1.data',
        ])
        if with_stats:
            extra_args.extend([
                '-w', '%{json}\\n'
            ])
        return self._raw(urls, intext=data,
                         alpn_proto=alpn_proto, options=extra_args,
                         with_stats=with_stats,
                         with_headers=with_headers,
                         with_profile=with_profile)

    def http_form(self, urls: List[str], form: Dict[str, str],
                  alpn_proto: Optional[str] = None,
                  with_stats: bool = True,
                  with_headers: bool = False,
                  extra_args: Optional[List[str]] = None):
        if extra_args is None:
            extra_args = []
        for key, val in form.items():
            extra_args.extend(['-F', f'{key}={val}'])
        extra_args.extend([
            '-o', 'download_#1.data',
        ])
        if with_stats:
            extra_args.extend([
                '-w', '%{json}\\n'
            ])
        return self._raw(urls, alpn_proto=alpn_proto, options=extra_args,
                         with_stats=with_stats,
                         with_headers=with_headers)

    def ftp_get(self, urls: List[str],
                with_stats: bool = True,
                with_profile: bool = False,
                with_tcpdump: bool = False,
                no_save: bool = False,
                extra_args: Optional[List[str]] = None):
        if extra_args is None:
            extra_args = []
        if no_save:
            extra_args.extend([
                '--out-null',
            ])
        else:
            extra_args.extend([
                '-o', 'download_#1.data',
            ])
        # remove any existing ones
        for i in range(100):
            self._rmf(self.download_file(i))
        if with_stats:
            extra_args.extend([
                '-w', '%{json}\\n'
            ])
        return self._raw(urls, options=extra_args,
                         with_stats=with_stats,
                         with_headers=False,
                         with_profile=with_profile,
                         with_tcpdump=with_tcpdump)

    def ftp_ssl_get(self, urls: List[str],
                    with_stats: bool = True,
                    with_profile: bool = False,
                    with_tcpdump: bool = False,
                    no_save: bool = False,
                    extra_args: Optional[List[str]] = None):
        if extra_args is None:
            extra_args = []
        extra_args.extend([
            '--ssl-reqd',
        ])
        return self.ftp_get(urls=urls, with_stats=with_stats,
                            with_profile=with_profile, no_save=no_save,
                            with_tcpdump=with_tcpdump,
                            extra_args=extra_args)

    def ftp_upload(self, urls: List[str],
                   fupload: Optional[Any] = None,
                   updata: Optional[str] = None,
                   with_stats: bool = True,
                   with_profile: bool = False,
                   with_tcpdump: bool = False,
                   extra_args: Optional[List[str]] = None):
        if extra_args is None:
            extra_args = []
        if fupload is not None:
            extra_args.extend([
                '--upload-file', fupload
            ])
        elif updata is not None:
            extra_args.extend([
                '--upload-file', '-'
            ])
        else:
            raise Exception('need either file or data to upload')
        if with_stats:
            extra_args.extend([
                '-w', '%{json}\\n'
            ])
        return self._raw(urls, options=extra_args,
                         intext=updata,
                         with_stats=with_stats,
                         with_headers=False,
                         with_profile=with_profile,
                         with_tcpdump=with_tcpdump)

    def ftp_ssl_upload(self, urls: List[str],
                       fupload: Optional[Any] = None,
                       updata: Optional[str] = None,
                       with_stats: bool = True,
                       with_profile: bool = False,
                       with_tcpdump: bool = False,
                       extra_args: Optional[List[str]] = None):
        if extra_args is None:
            extra_args = []
        extra_args.extend([
            '--ssl-reqd',
        ])
        return self.ftp_upload(urls=urls, fupload=fupload, updata=updata,
                               with_stats=with_stats, with_profile=with_profile,
                               with_tcpdump=with_tcpdump,
                               extra_args=extra_args)

    def response_file(self, idx: int):
        return os.path.join(self._run_dir, f'download_{idx}.data')

    def run_direct(self, args, with_stats: bool = False, with_profile: bool = False):
        my_args = [self._curl]
        if with_stats:
            my_args.extend([
                '-w', '%{json}\\n'
            ])
        my_args.extend([
            '-o', 'download.data',
        ])
        my_args.extend(args)
        return self._run(args=my_args, with_stats=with_stats, with_profile=with_profile)

    def _run(self, args, intext='', with_stats: bool = False,
             with_profile: bool = True, with_tcpdump: bool = False):
        self._rmf(self._stdoutfile)
        self._rmf(self._stderrfile)
        self._rmf(self._headerfile)
        exception = None
        profile = None
        tcpdump = None
        perf = None
        dtrace = None
        if with_tcpdump:
            tcpdump = RunTcpDump(self.env, self._run_dir)
            tcpdump.start()
        started_at = datetime.now()
        try:
            with open(self._stdoutfile, 'w') as cout, open(self._stderrfile, 'w') as cerr:
                if with_profile:
                    end_at = started_at + timedelta(seconds=self._timeout) \
                        if self._timeout else None
                    log.info(f'starting: {args}')
                    p = subprocess.Popen(args, stderr=cerr, stdout=cout,
                                         cwd=self._run_dir, shell=False,
                                         env=self._run_env)
                    profile = RunProfile(p.pid, started_at, self._run_dir)
                    if intext is not None and False:
                        p.communicate(input=intext.encode(), timeout=1)
                    if self._with_perf:
                        perf = PerfProfile(p.pid, self._run_dir)
                        perf.start()
                    elif self._with_dtrace:
                        dtrace = DTraceProfile(p.pid, self._run_dir)
                        dtrace.start()
                    ptimeout = 0.0
                    while True:
                        try:
                            p.wait(timeout=ptimeout)
                            break
                        except subprocess.TimeoutExpired:
                            if end_at and datetime.now() >= end_at:
                                p.kill()
                                raise subprocess.TimeoutExpired(cmd=args, timeout=self._timeout)
                            profile.sample()
                            ptimeout = 0.01
                    exitcode = p.returncode
                    profile.finish()
                    log.info(f'done: exit={exitcode}, profile={profile}')
                else:
                    p = subprocess.run(args, stderr=cerr, stdout=cout,
                                       cwd=self._run_dir, shell=False,
                                       input=intext.encode() if intext else None,
                                       timeout=self._timeout,
                                       env=self._run_env)
                    exitcode = p.returncode
        except subprocess.TimeoutExpired:
            now = datetime.now()
            duration = now - started_at
            log.warning(f'Timeout at {now} after {duration.total_seconds()}s '
                        f'(configured {self._timeout}s): {args}')
            exitcode = -1
            exception = 'TimeoutExpired'
        ended_at = datetime.now()
        if tcpdump:
            tcpdump.finish()
        if perf:
            perf.finish()
        if dtrace:
            dtrace.finish()
        if self._with_flame:
            self._generate_flame(args, dtrace=dtrace, perf=perf)
        coutput = open(self._stdoutfile).readlines()
        cerrput = open(self._stderrfile).readlines()
        return ExecResult(args=args, exit_code=exitcode, exception=exception,
                          stdout=coutput, stderr=cerrput,
                          duration=ended_at - started_at,
                          with_stats=with_stats,
                          profile=profile, tcpdump=tcpdump)

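    # Note (illustrative, not from the upstream sources): when profiling,
    # _run() polls the curl process in a loop, calling profile.sample() roughly
    # every 10ms until the process exits or the configured timeout is reached;
    # without profiling it simply uses subprocess.run() with a timeout. Either
    # way the captured stdout/stderr files are read back and wrapped in an
    # ExecResult.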
    def _raw(self, urls, intext='', timeout=None, options=None, insecure=False,
             alpn_proto: Optional[str] = None,
             force_resolve=True,
             with_stats=False,
             with_headers=True,
             def_tracing=True,
             with_profile=False,
             with_tcpdump=False):
        args = self._complete_args(
            urls=urls, timeout=timeout, options=options, insecure=insecure,
            alpn_proto=alpn_proto, force_resolve=force_resolve,
            with_headers=with_headers, def_tracing=def_tracing)
        r = self._run(args, intext=intext, with_stats=with_stats,
                      with_profile=with_profile, with_tcpdump=with_tcpdump)
        if r.exit_code == 0 and with_headers:
            self._parse_headerfile(self._headerfile, r=r)
        return r

    def _complete_args(self, urls, timeout=None, options=None,
                       insecure=False, force_resolve=True,
                       alpn_proto: Optional[str] = None,
                       with_headers: bool = True,
                       def_tracing: bool = True):
        if not isinstance(urls, list):
            urls = [urls]

        if options is not None and '--resolve' in options:
            force_resolve = False

        args = [self._curl, "-s", "--path-as-is"]
        if 'CURL_TEST_EVENT' in os.environ:
            args.append('--test-event')

        if self._socks_args:
            args.extend(self._socks_args)

        if with_headers:
            args.extend(["-D", self._headerfile])
        if def_tracing is not False and not self._silent:
            args.extend(['-v', '--trace-ids', '--trace-time'])
            if self.env.verbose > 1:
                args.extend(['--trace-config', 'http/2,http/3,h2-proxy,h1-proxy'])

        active_options = options
        if options is not None and '--next' in options:
            active_options = options[options.index('--next') + 1:]

        for url in urls:
            u = urlparse(urls[0])
            if options:
                args.extend(options)
            if alpn_proto is not None:
                if alpn_proto not in self.ALPN_ARG:
                    raise Exception(f'unknown ALPN protocol: "{alpn_proto}"')
                args.append(self.ALPN_ARG[alpn_proto])

            if u.scheme == 'http':
                pass
            elif insecure:
                args.append('--insecure')
            elif active_options and "--cacert" in active_options:
                pass
            elif u.hostname:
                args.extend(["--cacert", self.env.ca.cert_file])

            if force_resolve and u.hostname and u.hostname != 'localhost' \
                    and not re.match(r'^(\d+|\[|:).*', u.hostname):
                port = u.port if u.port else 443
                args.extend([
                    '--resolve', f'{u.hostname}:{port}:{self._server_addr}',
                ])
            if timeout is not None and int(timeout) > 0:
                args.extend(["--connect-timeout", str(int(timeout))])
            args.append(url)
        return args

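    # Note (illustrative, not from the upstream sources): for a single https
    # URL, _complete_args() builds a command line roughly like
    #   curl -s --path-as-is -D <headerfile> -v --trace-ids --trace-time
    #        --cacert <test CA> --resolve <host>:<port>:<server_addr> <url>
    # i.e. every test request is pinned to the local test server and verified
    # against the suite's own CA unless --insecure or a --cacert option was
    # passed in.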
    def _parse_headerfile(self, headerfile: str, r: Optional[ExecResult] = None) -> ExecResult:
        lines = open(headerfile).readlines()
        if r is None:
            r = ExecResult(args=[], exit_code=0, stdout=[], stderr=[])

        response = None

        def fin_response(resp):
            if resp:
                r.add_response(resp)

        expected = ['status']
        for line in lines:
            line = line.strip()
            if re.match(r'^$', line):
                if 'trailer' in expected:
                    # end of trailers
                    fin_response(response)
                    response = None
                    expected = ['status']
                elif 'header' in expected:
                    # end of header, another status or trailers might follow
                    expected = ['status', 'trailer']
                else:
                    assert False, f"unexpected line: '{line}'"
                continue
            if 'status' in expected:
                # log.debug("reading 1st response line: %s", line)
                m = re.match(r'^(\S+) (\d+)( .*)?$', line)
                if m:
                    fin_response(response)
                    response = {
                        "protocol": m.group(1),
                        "status": int(m.group(2)),
                        "description": m.group(3),
                        "header": {},
                        "trailer": {},
                        "body": r.outraw
                    }
                    expected = ['header']
                    continue
            if 'trailer' in expected:
                m = re.match(r'^([^:]+):\s*(.*)$', line)
                if m:
                    response['trailer'][m.group(1).lower()] = m.group(2)
                    continue
            if 'header' in expected:
                m = re.match(r'^([^:]+):\s*(.*)$', line)
                if m:
                    response['header'][m.group(1).lower()] = m.group(2)
                    continue
            assert False, f"unexpected line: '{line}', expected: {expected}"

        fin_response(response)
        return r

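    # Note (illustrative, not from the upstream sources): _parse_headerfile()
    # walks the `-D` output, which can contain several consecutive responses
    # (e.g. redirects or interim responses). A block such as
    #
    #   HTTP/2 200
    #   content-type: text/html
    #   <blank line>
    #
    # becomes {'protocol': 'HTTP/2', 'status': 200, 'description': ...,
    # 'header': {'content-type': 'text/html'}, 'trailer': {}, 'body': ...}.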
    def _perf_collapse(self, perf: PerfProfile, file_err):
        if not os.path.exists(perf.file):
            raise Exception(f'perf output file does not exist: {perf.file}')
        fg_collapse = os.path.join(self._fg_dir, 'stackcollapse-perf.pl')
        if not os.path.exists(fg_collapse):
            raise Exception(f'FlameGraph script not found: {fg_collapse}')
        stacks_collapsed = f'{perf.file}.collapsed'
        log.info(f'collapsing stacks into {stacks_collapsed}')
        with open(stacks_collapsed, 'w') as cout, open(file_err, 'w') as cerr:
            p = subprocess.run([
                fg_collapse, perf.file
            ], stdout=cout, stderr=cerr, cwd=self._run_dir, shell=False)
            rc = p.returncode
            if rc != 0:
                raise Exception(f'{fg_collapse} returned error {rc}')
        return stacks_collapsed

    def _dtrace_collapse(self, dtrace: DTraceProfile, file_err):
        if not os.path.exists(dtrace.file):
            raise Exception(f'dtrace output file does not exist: {dtrace.file}')
        fg_collapse = os.path.join(self._fg_dir, 'stackcollapse.pl')
        if not os.path.exists(fg_collapse):
            raise Exception(f'FlameGraph script not found: {fg_collapse}')
        stacks_collapsed = f'{dtrace.file}.collapsed'
        log.info(f'collapsing stacks into {stacks_collapsed}')
        with open(stacks_collapsed, 'w') as cout, open(file_err, 'a') as cerr:
            p = subprocess.run([
                fg_collapse, dtrace.file
            ], stdout=cout, stderr=cerr, cwd=self._run_dir, shell=False)
            rc = p.returncode
            if rc != 0:
                raise Exception(f'{fg_collapse} returned error {rc}')
        return stacks_collapsed

    def _generate_flame(self, curl_args: List[str],
                        dtrace: Optional[DTraceProfile] = None,
                        perf: Optional[PerfProfile] = None):
        fg_gen_flame = os.path.join(self._fg_dir, 'flamegraph.pl')
        file_svg = os.path.join(self._run_dir, 'curl.flamegraph.svg')
        if not os.path.exists(fg_gen_flame):
            raise Exception(f'FlameGraph script not found: {fg_gen_flame}')

        log.info('waiting a sec for perf/dtrace to finish flushing')
        time.sleep(2)
        log.info('generating flame graph for this run')
        file_err = os.path.join(self._run_dir, 'curl.flamegraph.stderr')
        if perf:
            stacks_collapsed = self._perf_collapse(perf, file_err)
        elif dtrace:
            stacks_collapsed = self._dtrace_collapse(dtrace, file_err)
        else:
            raise Exception('no stacks measure given')

        log.info(f'generating graph into {file_svg}')
        cmdline = ' '.join(curl_args)
        if len(cmdline) > 80:
            title = f'{cmdline[:80]}...'
            subtitle = f'...{cmdline[-80:]}'
        else:
            title = cmdline
            subtitle = ''
        with open(file_svg, 'w') as cout, open(file_err, 'a') as cerr:
            p = subprocess.run([
                fg_gen_flame, '--colors', 'green',
                '--title', title, '--subtitle', subtitle,
                stacks_collapsed
            ], stdout=cout, stderr=cerr, cwd=self._run_dir, shell=False)
            rc = p.returncode
            if rc != 0:
                raise Exception(f'{fg_gen_flame} returned error {rc}')