Path: blob/main/external/curl/tests/http/test_02_download.py
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#***************************************************************************
#                                  _   _ ____  _
#  Project                     ___| | | | |  _ \| |
#                             / __| | | | | |_) | |
#                            | (__| |_| |  _ <| |___
#                             \___|\___/|_| \_\_____|
#
# Copyright (C) Daniel Stenberg, <daniel@haxx.se>, et al.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution. The terms
# are also available at https://curl.se/docs/copyright.html.
#
# You may opt to use, copy, modify, merge, publish, distribute and/or sell
# copies of the Software, and permit persons to whom the Software is
# furnished to do so, under the terms of the COPYING file.
#
# This software is distributed on an "AS IS" basis, WITHOUT WARRANTY OF ANY
# KIND, either express or implied.
#
# SPDX-License-Identifier: curl
#
###########################################################################
#
import difflib
import filecmp
import logging
import math
import os
import re
import sys
from datetime import timedelta
import pytest

from testenv import Env, CurlClient, LocalClient


log = logging.getLogger(__name__)


class TestDownload:

    @pytest.fixture(autouse=True, scope='class')
    def _class_scope(self, env, httpd):
        indir = httpd.docs_dir
        env.make_data_file(indir=indir, fname="data-10k", fsize=10*1024)
        env.make_data_file(indir=indir, fname="data-100k", fsize=100*1024)
        env.make_data_file(indir=indir, fname="data-1m", fsize=1024*1024)
        env.make_data_file(indir=indir, fname="data-10m", fsize=10*1024*1024)
        env.make_data_file(indir=indir, fname="data-50m", fsize=50*1024*1024)
        env.make_data_gzipbomb(indir=indir, fname="bomb-100m.txt", fsize=100*1024*1024)

    # download 1 file
    @pytest.mark.parametrize("proto", ['http/1.1', 'h2', 'h3'])
    def test_02_01_download_1(self, env: Env, httpd, nghttpx, proto):
        if proto == 'h3' and not env.have_h3():
            pytest.skip("h3 not supported")
        curl = CurlClient(env=env)
        url = f'https://{env.authority_for(env.domain1, proto)}/data.json'
        r = curl.http_download(urls=[url], alpn_proto=proto)
        r.check_response(http_status=200)

    # download 2 files
    @pytest.mark.parametrize("proto", ['http/1.1', 'h2', 'h3'])
    def test_02_02_download_2(self, env: Env, httpd, nghttpx, proto):
        if proto == 'h3' and not env.have_h3():
            pytest.skip("h3 not supported")
        curl = CurlClient(env=env)
        url = f'https://{env.authority_for(env.domain1, proto)}/data.json?[0-1]'
        r = curl.http_download(urls=[url], alpn_proto=proto)
        r.check_response(http_status=200, count=2)

    # download 10 files sequentially
    @pytest.mark.parametrize("proto", ['http/1.1', 'h2', 'h3'])
    def test_02_03_download_sequential(self, env: Env, httpd, nghttpx, proto):
        if proto == 'h3' and not env.have_h3():
            pytest.skip("h3 not supported")
        if (proto == 'http/1.1' or proto == 'h2') and env.curl_uses_lib('mbedtls') and \
                sys.platform.startswith('darwin') and env.ci_run:
            pytest.skip('mbedtls 3.6.3 fails this test on macOS CI runners')
        count = 10
        curl = CurlClient(env=env)
        urln = f'https://{env.authority_for(env.domain1, proto)}/data.json?[0-{count-1}]'
        r = curl.http_download(urls=[urln], alpn_proto=proto)
        r.check_response(http_status=200, count=count, connect_count=1)

    # download 10 files in parallel
    @pytest.mark.parametrize("proto", ['http/1.1', 'h2', 'h3'])
    def test_02_04_download_parallel(self, env: Env, httpd, nghttpx, proto):
        if proto == 'h3' and not env.have_h3():
            pytest.skip("h3 not supported")
        if proto == 'h2' and env.curl_uses_lib('mbedtls') and \
                sys.platform.startswith('darwin') and env.ci_run:
            pytest.skip('mbedtls 3.6.3 fails this test on macOS CI runners')
        count = 10
        max_parallel = 5
        curl = CurlClient(env=env)
        urln = f'https://{env.authority_for(env.domain1, proto)}/data.json?[0-{count-1}]'
        r = curl.http_download(urls=[urln], alpn_proto=proto, extra_args=[
            '--parallel', '--parallel-max', f'{max_parallel}'
        ])
        r.check_response(http_status=200, count=count)
        if proto == 'http/1.1':
            # http/1.1 parallel transfers will open multiple connections
            assert r.total_connects > 1, r.dump_logs()
        else:
            # http2 parallel transfers will use one connection (common limit is 100)
            assert r.total_connects == 1, r.dump_logs()

    # download 200 files sequentially
    @pytest.mark.parametrize("proto", ['http/1.1', 'h2', 'h3'])
    def test_02_05_download_many_sequential(self, env: Env, httpd, nghttpx, proto):
        if proto == 'h3' and not env.have_h3():
            pytest.skip("h3 not supported")
        if proto == 'h3' and env.curl_uses_lib('msh3'):
            pytest.skip("msh3 shaky here")
        if proto == 'h2' and env.curl_uses_lib('mbedtls') and \
                sys.platform.startswith('darwin') and env.ci_run:
            pytest.skip('mbedtls 3.6.3 fails this test on macOS CI runners')
        count = 200
        curl = CurlClient(env=env)
        urln = f'https://{env.authority_for(env.domain1, proto)}/data.json?[0-{count-1}]'
        r = curl.http_download(urls=[urln], alpn_proto=proto)
        r.check_response(http_status=200, count=count)
        if proto == 'http/1.1':
            # http/1.1 sequential transfers need multiple connections
            # (test servers allow only 100 requests per connection)
            assert r.total_connects > 1, r.dump_logs()
        else:
            # h2/h3 serve all requests over a single connection
            assert r.total_connects == 1, r.dump_logs()

    # download 200 files in parallel
    @pytest.mark.parametrize("proto", ['h2', 'h3'])
    def test_02_06_download_many_parallel(self, env: Env, httpd, nghttpx, proto):
        if proto == 'h3' and not env.have_h3():
            pytest.skip("h3 not supported")
        if proto == 'h2' and env.curl_uses_lib('mbedtls') and \
                sys.platform.startswith('darwin') and env.ci_run:
            pytest.skip('mbedtls 3.6.3 fails this test on macOS CI runners')
        count = 200
        max_parallel = 50
        curl = CurlClient(env=env)
        urln = f'https://{env.authority_for(env.domain1, proto)}/data.json?[000-{count-1}]'
        r = curl.http_download(urls=[urln], alpn_proto=proto, extra_args=[
            '--parallel', '--parallel-max', f'{max_parallel}'
        ])
        r.check_response(http_status=200, count=count, connect_count=1)

    # download files in parallel, check connection reuse/multiplexing
    @pytest.mark.parametrize("proto", ['h2', 'h3'])
    def test_02_07_download_reuse(self, env: Env, httpd, nghttpx, proto):
        if proto == 'h3' and not env.have_h3():
            pytest.skip("h3 not supported")
        count = 200
        curl = CurlClient(env=env)
        urln = f'https://{env.authority_for(env.domain1, proto)}/data.json?[0-{count-1}]'
        r = curl.http_download(urls=[urln], alpn_proto=proto,
                               with_stats=True, extra_args=[
            '--parallel', '--parallel-max', '200'
        ])
        r.check_response(http_status=200, count=count)
        # should have used at most 2 connections (test servers allow 100 req/conn)
        # it may be just 1 on slow systems where requests are answered faster than
        # curl can exhaust the capacity, or when curl runs at address-sanitizer speed
        assert r.total_connects <= 2, "h2 should use fewer connections here"

    # download files in parallel with http/1.1, check connections are not reused
    @pytest.mark.parametrize("proto", ['http/1.1'])
    def test_02_07b_download_reuse(self, env: Env, httpd, nghttpx,
                                   proto):
        count = 6
        curl = CurlClient(env=env)
        urln = f'https://{env.authority_for(env.domain1, proto)}/data.json?[0-{count-1}]'
        r = curl.http_download(urls=[urln], alpn_proto=proto,
                               with_stats=True, extra_args=[
            '--parallel'
        ])
        r.check_response(count=count, http_status=200)
        # http/1.1 should have used count connections
        assert r.total_connects == count, "http/1.1 should use this many connections"

    @pytest.mark.parametrize("proto", ['http/1.1', 'h2', 'h3'])
    def test_02_08_1MB_serial(self, env: Env, httpd, nghttpx, proto):
        if proto == 'h3' and not env.have_h3():
            pytest.skip("h3 not supported")
        count = 5
        urln = f'https://{env.authority_for(env.domain1, proto)}/data-1m?[0-{count-1}]'
        curl = CurlClient(env=env)
        r = curl.http_download(urls=[urln], alpn_proto=proto)
        r.check_response(count=count, http_status=200)

    @pytest.mark.parametrize("proto", ['h2', 'h3'])
    def test_02_09_1MB_parallel(self, env: Env, httpd, nghttpx, proto):
        if proto == 'h3' and not env.have_h3():
            pytest.skip("h3 not supported")
        count = 5
        urln = f'https://{env.authority_for(env.domain1, proto)}/data-1m?[0-{count-1}]'
        curl = CurlClient(env=env)
        r = curl.http_download(urls=[urln], alpn_proto=proto, extra_args=[
            '--parallel'
        ])
        r.check_response(count=count, http_status=200)

    @pytest.mark.skipif(condition=Env().slow_network, reason="not suitable for slow network tests")
    @pytest.mark.skipif(condition=Env().ci_run, reason="not suitable for CI runs")
    @pytest.mark.parametrize("proto", ['http/1.1', 'h2', 'h3'])
    def test_02_10_10MB_serial(self, env: Env, httpd, nghttpx, proto):
        if proto == 'h3' and not env.have_h3():
            pytest.skip("h3 not supported")
        count = 3
        urln = f'https://{env.authority_for(env.domain1, proto)}/data-10m?[0-{count-1}]'
        curl = CurlClient(env=env)
        r = curl.http_download(urls=[urln], alpn_proto=proto)
        r.check_response(count=count, http_status=200)

    @pytest.mark.skipif(condition=Env().slow_network, reason="not suitable for slow network tests")
    @pytest.mark.skipif(condition=Env().ci_run, reason="not suitable for CI runs")
    @pytest.mark.parametrize("proto", ['h2', 'h3'])
    def test_02_11_10MB_parallel(self, env: Env, httpd, nghttpx, proto):
        if proto == 'h3' and not env.have_h3():
            pytest.skip("h3 not supported")
        if proto == 'h3' and env.curl_uses_lib('msh3'):
            pytest.skip("msh3 stalls here")
        count = 3
        urln = f'https://{env.authority_for(env.domain1, proto)}/data-10m?[0-{count-1}]'
        curl = CurlClient(env=env)
        r = curl.http_download(urls=[urln], alpn_proto=proto, extra_args=[
            '--parallel'
        ])
        r.check_response(count=count, http_status=200)

    @pytest.mark.parametrize("proto", ['h2', 'h3'])
    def test_02_12_head_serial_https(self, env: Env, httpd, nghttpx, proto):
        if proto == 'h3' and not env.have_h3():
            pytest.skip("h3 not supported")
        count = 5
        urln = f'https://{env.authority_for(env.domain1, proto)}/data-10m?[0-{count-1}]'
        curl = CurlClient(env=env)
        r = curl.http_download(urls=[urln], alpn_proto=proto, extra_args=[
            '--head'
        ])
        r.check_response(count=count, http_status=200)

    @pytest.mark.parametrize("proto", ['h2'])
    def test_02_13_head_serial_h2c(self, env: Env, httpd, nghttpx, proto):
        if proto == 'h3' and not env.have_h3():
            pytest.skip("h3 not supported")
        count = 5
        urln = f'http://{env.domain1}:{env.http_port}/data-10m?[0-{count-1}]'
        curl = CurlClient(env=env)
        r = curl.http_download(urls=[urln], alpn_proto=proto, extra_args=[
            '--head', '--http2-prior-knowledge', '--fail-early'
        ])
        r.check_response(count=count, http_status=200)

    @pytest.mark.parametrize("proto", ['h2', 'h3'])
    def test_02_14_not_found(self, env: Env, httpd, nghttpx, proto):
        if proto == 'h3' and not env.have_h3():
            pytest.skip("h3 not supported")
        if proto == 'h3' and env.curl_uses_lib('msh3'):
            pytest.skip("msh3 stalls here")
        count = 5
        urln = f'https://{env.authority_for(env.domain1, proto)}/not-found?[0-{count-1}]'
        curl = CurlClient(env=env)
        r = curl.http_download(urls=[urln], alpn_proto=proto, extra_args=[
            '--parallel'
        ])
        r.check_stats(count=count, http_status=404, exitcode=0,
                      remote_port=env.port_for(alpn_proto=proto),
                      remote_ip='127.0.0.1')

    @pytest.mark.parametrize("proto", ['h2', 'h3'])
    def test_02_15_fail_not_found(self, env: Env, httpd, nghttpx, proto):
        if proto == 'h3' and not env.have_h3():
            pytest.skip("h3 not supported")
        if proto == 'h3' and env.curl_uses_lib('msh3'):
            pytest.skip("msh3 stalls here")
        count = 5
        urln = f'https://{env.authority_for(env.domain1, proto)}/not-found?[0-{count-1}]'
        curl = CurlClient(env=env)
        r = curl.http_download(urls=[urln], alpn_proto=proto, extra_args=[
            '--fail'
        ])
        r.check_stats(count=count, http_status=404, exitcode=22,
                      remote_port=env.port_for(alpn_proto=proto),
                      remote_ip='127.0.0.1')

    @pytest.mark.skipif(condition=Env().slow_network, reason="not suitable for slow network tests")
    def test_02_20_h2_small_frames(self, env: Env, httpd, configures_httpd):
        # Test case to reproduce content corruption as observed in
        # https://github.com/curl/curl/issues/10525
        # To reliably reproduce, we need an Apache httpd that supports
        # setting smaller frame sizes. This is not released yet, so we
        # test if it works and back out if not.
        httpd.set_extra_config(env.domain1, lines=[
            'H2MaxDataFrameLen 1024',
        ])
        if not httpd.reload_if_config_changed():
            pytest.skip('H2MaxDataFrameLen not supported')
        # ok, make 5 downloads with 2 running in parallel; they are expected
        # to stumble into the issue when using `lib/http2.c` from curl 7.88.0
        count = 5
        urln = f'https://{env.authority_for(env.domain1, "h2")}/data-1m?[0-{count-1}]'
        curl = CurlClient(env=env)
        r = curl.http_download(urls=[urln], alpn_proto="h2", extra_args=[
            '--parallel', '--parallel-max', '2'
        ])
        r.check_response(count=count, http_status=200)
        srcfile = os.path.join(httpd.docs_dir, 'data-1m')
        self.check_downloads(curl, srcfile, count)

    # download serial via lib client, pause/resume at different offsets
    @pytest.mark.parametrize("pause_offset", [0, 10*1024, 100*1023, 640000])
    @pytest.mark.parametrize("proto", ['http/1.1', 'h3'])
    def test_02_21_lib_serial(self, env: Env, httpd, nghttpx, proto, pause_offset):
        if proto == 'h3' and not env.have_h3():
            pytest.skip("h3 not supported")
        count = 2
        docname = 'data-10m'
        url = f'https://localhost:{env.https_port}/{docname}'
        client = LocalClient(name='hx-download', env=env)
        if not client.exists():
            pytest.skip(f'example client not built: {client.name}')
        r = client.run(args=[
            '-n', f'{count}', '-P', f'{pause_offset}', '-V', proto, url
        ])
        r.check_exit_code(0)
        srcfile = os.path.join(httpd.docs_dir, docname)
        self.check_downloads(client, srcfile, count)

    # h2 download via lib client, pause/resume at different offsets;
    # debug-override the stream window size to reproduce #16955
    @pytest.mark.parametrize("pause_offset", [0, 10*1024, 100*1023, 640000])
    @pytest.mark.parametrize("swin_max", [0, 10*1024])
    def test_02_21_h2_lib_serial(self, env: Env, httpd, pause_offset, swin_max):
        proto = 'h2'
        count = 2
        docname = 'data-10m'
        url = f'https://localhost:{env.https_port}/{docname}'
        run_env = os.environ.copy()
        run_env['CURL_DEBUG'] = 'multi,http/2'
        if swin_max > 0:
            run_env['CURL_H2_STREAM_WIN_MAX'] = f'{swin_max}'
        client = LocalClient(name='hx-download', env=env, run_env=run_env)
        if not client.exists():
            pytest.skip(f'example client not built: {client.name}')
        r = client.run(args=[
            '-n', f'{count}', '-P', f'{pause_offset}', '-V', proto, url
        ])
        r.check_exit_code(0)
        srcfile = os.path.join(httpd.docs_dir, docname)
        self.check_downloads(client, srcfile, count)

    # download via lib client, several at a time, pause/resume
    @pytest.mark.parametrize("pause_offset", [100*1023])
    @pytest.mark.parametrize("proto", ['http/1.1', 'h2', 'h3'])
    def test_02_22_lib_parallel_resume(self, env: Env, httpd, nghttpx, proto, pause_offset):
        if proto == 'h3' and not env.have_h3():
            pytest.skip("h3 not supported")
        count = 2
        max_parallel = 5
        docname = 'data-10m'
        url = f'https://localhost:{env.https_port}/{docname}'
        client = LocalClient(name='hx-download', env=env)
        if not client.exists():
            pytest.skip(f'example client not built: {client.name}')
        r = client.run(args=[
            '-n', f'{count}', '-m', f'{max_parallel}',
            '-P', f'{pause_offset}', '-V', proto, url
        ])
        r.check_exit_code(0)
        srcfile = os.path.join(httpd.docs_dir, docname)
        self.check_downloads(client, srcfile, count)

    # download, several at a time, pause and abort paused
    @pytest.mark.parametrize("proto", ['http/1.1', 'h2', 'h3'])
    def test_02_23a_lib_abort_paused(self, env: Env, httpd, nghttpx, proto):
        if proto == 'h3' and not env.have_h3():
            pytest.skip("h3 not supported")
        if proto == 'h3' and env.curl_uses_ossl_quic():
            pytest.skip('OpenSSL QUIC fails here')
        if proto == 'h3' and env.ci_run and env.curl_uses_lib('quiche'):
            pytest.skip("fails in CI, but works locally for unknown reasons")
        count = 10
        max_parallel = 5
        if proto in ['h2', 'h3']:
            pause_offset = 64 * 1024
        else:
            pause_offset = 12 * 1024
        docname = 'data-1m'
        url = f'https://localhost:{env.https_port}/{docname}'
        client = LocalClient(name='hx-download', env=env)
        if not client.exists():
            pytest.skip(f'example client not built: {client.name}')
        r = client.run(args=[
            '-n', f'{count}', '-m', f'{max_parallel}', '-a',
            '-P', f'{pause_offset}', '-V', proto, url
        ])
        r.check_exit_code(0)
        srcfile = os.path.join(httpd.docs_dir, docname)
        # downloads should be there, but not necessarily complete
        self.check_downloads(client, srcfile, count, complete=False)

    # download, several at a time, abort after n bytes
    @pytest.mark.parametrize("proto", ['http/1.1', 'h2', 'h3'])
    def test_02_23b_lib_abort_offset(self, env: Env, httpd, nghttpx, proto):
        if proto == 'h3' and not env.have_h3():
            pytest.skip("h3 not supported")
        if proto == 'h3' and env.curl_uses_ossl_quic():
            pytest.skip('OpenSSL QUIC fails here')
        if proto == 'h3' and env.ci_run and env.curl_uses_lib('quiche'):
            pytest.skip("fails in CI, but works locally for unknown reasons")
        count = 10
        max_parallel = 5
        if proto in ['h2', 'h3']:
            abort_offset = 64 * 1024
        else:
            abort_offset = 12 * 1024
        docname = 'data-1m'
        url = f'https://localhost:{env.https_port}/{docname}'
        client = LocalClient(name='hx-download', env=env)
        if not client.exists():
            pytest.skip(f'example client not built: {client.name}')
        r = client.run(args=[
            '-n', f'{count}', '-m', f'{max_parallel}', '-a',
            '-A', f'{abort_offset}', '-V', proto, url
        ])
        r.check_exit_code(42)  # CURLE_ABORTED_BY_CALLBACK
        srcfile = os.path.join(httpd.docs_dir, docname)
        # downloads should be there, but not necessarily complete
        self.check_downloads(client, srcfile, count, complete=False)

    # download, several at a time, fail writing after n bytes
    @pytest.mark.parametrize("proto", ['http/1.1', 'h2', 'h3'])
    def test_02_23c_lib_fail_offset(self, env: Env, httpd, nghttpx, proto):
        if proto == 'h3' and not env.have_h3():
            pytest.skip("h3 not supported")
        if proto == 'h3' and env.curl_uses_ossl_quic():
            pytest.skip('OpenSSL QUIC fails here')
        if proto == 'h3' and env.ci_run and env.curl_uses_lib('quiche'):
            pytest.skip("fails in CI, but works locally for unknown reasons")
        count = 10
        max_parallel = 5
        if proto in ['h2', 'h3']:
            fail_offset = 64 * 1024
        else:
            fail_offset = 12 * 1024
        docname = 'data-1m'
        url = f'https://localhost:{env.https_port}/{docname}'
        client = LocalClient(name='hx-download', env=env)
        if not client.exists():
            pytest.skip(f'example client not built: {client.name}')
        r = client.run(args=[
            '-n', f'{count}', '-m', f'{max_parallel}', '-a',
            '-F', f'{fail_offset}', '-V', proto, url
        ])
        r.check_exit_code(23)  # CURLE_WRITE_ERROR
        srcfile = os.path.join(httpd.docs_dir, docname)
        # downloads should be there, but not necessarily complete
        self.check_downloads(client, srcfile, count, complete=False)

    # speed limited download
    @pytest.mark.parametrize("proto", ['http/1.1', 'h2', 'h3'])
    def test_02_24_speed_limit(self, env: Env, httpd, nghttpx, proto):
        if proto == 'h3' and not env.have_h3():
            pytest.skip("h3 not supported")
        count = 1
        url = f'https://{env.authority_for(env.domain1, proto)}/data-1m'
        curl = CurlClient(env=env)
        speed_limit = 384 * 1024
        min_duration = math.floor((1024 * 1024)/speed_limit)
        r = curl.http_download(urls=[url], alpn_proto=proto, extra_args=[
            '--limit-rate', f'{speed_limit}'
        ])
        r.check_response(count=count, http_status=200)
        assert r.duration > timedelta(seconds=min_duration), \
            f'rate limited transfer should take more than {min_duration}s, '\
            f'not {r.duration}'

    # make extreme parallel h2 upgrades, check that connections are not
    # invalidly reused before the protocol switch has happened
    def test_02_25_h2_upgrade_x(self, env: Env, httpd):
        url = f'http://localhost:{env.http_port}/data-100k'
        client = LocalClient(name='h2-upgrade-extreme', env=env, timeout=15)
        if not client.exists():
            pytest.skip(f'example client not built: {client.name}')
        r = client.run(args=[url])
        assert r.exit_code == 0, f'{client.dump_logs()}'

    # Special client that tests TLS session reuse in parallel transfers.
    # TODO: just uses a single connection for h2/h3. Not sure how to prevent that
    @pytest.mark.parametrize("proto", ['http/1.1', 'h2', 'h3'])
    def test_02_26_session_shared_reuse(self, env: Env, proto, httpd, nghttpx):
        url = f'https://{env.authority_for(env.domain1, proto)}/data-100k'
        client = LocalClient(name='tls-session-reuse', env=env)
        if not client.exists():
            pytest.skip(f'example client not built: {client.name}')
        r = client.run(args=[proto, url])
        r.check_exit_code(0)

    # test on paused transfers, based on issue #11982
    @pytest.mark.parametrize("proto", ['http/1.1', 'h2', 'h3'])
    def test_02_27a_paused_no_cl(self, env: Env, httpd, nghttpx, proto):
        url = f'https://{env.authority_for(env.domain1, proto)}' \
            '/curltest/tweak/?&chunks=6&chunk_size=8000'
        client = LocalClient(env=env, name='h2-pausing')
        r = client.run(args=['-V', proto, url])
        r.check_exit_code(0)

    # test on paused transfers, based on issue #11982
    @pytest.mark.parametrize("proto", ['http/1.1', 'h2', 'h3'])
    def test_02_27b_paused_no_cl(self, env: Env, httpd, nghttpx, proto):
        url = f'https://{env.authority_for(env.domain1, proto)}' \
            '/curltest/tweak/?error=502'
        client = LocalClient(env=env, name='h2-pausing')
        r = client.run(args=['-V', proto, url])
        r.check_exit_code(0)

    # test on paused transfers, based on issue #11982
    @pytest.mark.parametrize("proto", ['http/1.1', 'h2', 'h3'])
    def test_02_27c_paused_no_cl(self, env: Env, httpd, nghttpx, proto):
        url = f'https://{env.authority_for(env.domain1, proto)}' \
            '/curltest/tweak/?status=200&chunks=1&chunk_size=100'
        client = LocalClient(env=env, name='h2-pausing')
        r = client.run(args=['-V', proto, url])
        r.check_exit_code(0)

    @pytest.mark.parametrize("proto", ['http/1.1', 'h2', 'h3'])
    def test_02_28_get_compressed(self, env: Env, httpd, nghttpx, proto):
        if proto == 'h3' and not env.have_h3():
            pytest.skip("h3 not supported")
        count = 1
        urln = f'https://{env.authority_for(env.domain1brotli, proto)}/data-100k?[0-{count-1}]'
        curl = CurlClient(env=env)
        r = curl.http_download(urls=[urln], alpn_proto=proto, extra_args=[
            '--compressed'
        ])
        r.check_exit_code(code=0)
        r.check_response(count=count, http_status=200)

    def check_downloads(self, client, srcfile: str, count: int,
                        complete: bool = True):
        for i in range(count):
            dfile = client.download_file(i)
            assert os.path.exists(dfile)
            if complete and not filecmp.cmp(srcfile, dfile, shallow=False):
                diff = "".join(difflib.unified_diff(a=open(srcfile).readlines(),
                                                    b=open(dfile).readlines(),
                                                    fromfile=srcfile,
                                                    tofile=dfile,
                                                    n=1))
                assert False, f'download {dfile} differs:\n{diff}'

    # download via lib client, 1 at a time, pause/resume at different offsets
    @pytest.mark.parametrize("pause_offset", [0, 10*1024, 100*1023, 640000])
    @pytest.mark.parametrize("proto", ['http/1.1', 'h2', 'h3'])
    def test_02_29_h2_lib_serial(self, env: Env, httpd, nghttpx, proto, pause_offset):
        count = 2
        docname = 'data-10m'
        url = f'https://localhost:{env.https_port}/{docname}'
        client = LocalClient(name='hx-download', env=env)
        if not client.exists():
            pytest.skip(f'example client not built: {client.name}')
        r = client.run(args=[
            '-n', f'{count}', '-P', f'{pause_offset}', '-V', proto, url
        ])
        r.check_exit_code(0)
        srcfile = os.path.join(httpd.docs_dir, docname)
        self.check_downloads(client, srcfile, count)

    # download parallel with prior knowledge
    def test_02_30_parallel_prior_knowledge(self, env: Env, httpd):
        count = 3
        curl = CurlClient(env=env)
        urln = f'http://{env.domain1}:{env.http_port}/data.json?[0-{count-1}]'
        r = curl.http_download(urls=[urln], extra_args=[
            '--parallel', '--http2-prior-knowledge'
        ])
        r.check_response(http_status=200, count=count)
        assert r.total_connects == 1, r.dump_logs()

    # download parallel with h2 "Upgrade:"
    def test_02_31_parallel_upgrade(self, env: Env, httpd, nghttpx):
        count = 3
        curl = CurlClient(env=env)
        urln = f'http://{env.domain1}:{env.http_port}/data.json?[0-{count-1}]'
        r = curl.http_download(urls=[urln], extra_args=[
            '--parallel', '--http2'
        ])
        r.check_response(http_status=200, count=count)
        # we see up to 3 connections, because Apache wants to serve only a single
        # request via Upgrade: and then closes the connection. But if a new
        # request arrives in time, it might still get served.
        assert r.total_connects <= 3, r.dump_logs()

    # nghttpx is the only server we have that supports TLS early data
    @pytest.mark.skipif(condition=not Env.have_nghttpx(), reason="no nghttpx")
    @pytest.mark.parametrize("proto", ['http/1.1', 'h2', 'h3'])
    def test_02_32_earlydata(self, env: Env, httpd, nghttpx, proto):
        if not env.curl_can_early_data():
            pytest.skip('TLS earlydata not implemented')
        if proto == 'h3' and \
                (not env.have_h3() or not env.curl_can_h3_early_data()):
            pytest.skip("h3 not supported")
        if proto != 'h3' and sys.platform.startswith('darwin') and env.ci_run:
            pytest.skip('failing on macOS CI runners')
        count = 2
        docname = 'data-10k'
        # we want this test to always connect to nghttpx, since it is
        # the only server we have that supports TLS earlydata
        port = env.port_for(proto)
        if proto != 'h3':
            port = env.nghttpx_https_port
        url = f'https://{env.domain1}:{port}/{docname}'
        client = LocalClient(name='hx-download', env=env)
        if not client.exists():
            pytest.skip(f'example client not built: {client.name}')
        r = client.run(args=[
            '-n', f'{count}',
            '-e',  # use TLS earlydata
            '-f',  # forbid reuse of connections
            '-r', f'{env.domain1}:{port}:127.0.0.1',
            '-V', proto, url
        ])
        r.check_exit_code(0)
        srcfile = os.path.join(httpd.docs_dir, docname)
        self.check_downloads(client, srcfile, count)
        # check that TLS earlydata worked as expected
        earlydata = {}
        reused_session = False
        for line in r.trace_lines:
            m = re.match(r'^\[t-(\d+)] EarlyData: (-?\d+)', line)
            if m:
                earlydata[int(m.group(1))] = int(m.group(2))
                continue
            m = re.match(r'\[1-1] \* SSL reusing session.*', line)
            if m:
                reused_session = True
        assert reused_session, 'session was not reused for 2nd transfer'
        assert earlydata[0] == 0, f'{earlydata}'
        if proto == 'http/1.1':
            assert earlydata[1] == 111, f'{earlydata}'
        elif proto == 'h2':
            assert earlydata[1] == 127, f'{earlydata}'
        elif proto == 'h3':
            assert earlydata[1] == 109, f'{earlydata}'

    @pytest.mark.parametrize("proto", ['http/1.1', 'h2'])
    @pytest.mark.parametrize("max_host_conns", [0, 1, 5])
    def test_02_33_max_host_conns(self, env: Env, httpd, nghttpx, proto, max_host_conns):
        if not env.curl_is_debug():
            pytest.skip('only works for curl debug builds')
        if proto == 'h3' and not env.have_h3():
            pytest.skip("h3 not supported")
        count = 50
        max_parallel = 50
        docname = 'data-10k'
        port = env.port_for(proto)
        url = f'https://{env.domain1}:{port}/{docname}'
        run_env = os.environ.copy()
        run_env['CURL_DEBUG'] = 'multi'
        client = LocalClient(name='hx-download', env=env, run_env=run_env)
        if not client.exists():
            pytest.skip(f'example client not built: {client.name}')
        r = client.run(args=[
            '-n', f'{count}',
            '-m', f'{max_parallel}',
            '-x',  # always use a fresh connection
            '-M', str(max_host_conns),  # limit conns per host
            '-r', f'{env.domain1}:{port}:127.0.0.1',
            '-V', proto, url
        ])
        r.check_exit_code(0)
        srcfile = os.path.join(httpd.docs_dir, docname)
        self.check_downloads(client, srcfile, count)
        if max_host_conns > 0:
            matched_lines = 0
            for line in r.trace_lines:
                m = re.match(r'.*The cache now contains (\d+) members.*', line)
                if m:
                    matched_lines += 1
                    n = int(m.group(1))
                    assert n <= max_host_conns
            assert matched_lines > 0

    @pytest.mark.parametrize("proto", ['http/1.1', 'h2'])
    @pytest.mark.parametrize("max_total_conns", [0, 1, 5])
    def test_02_34_max_total_conns(self, env: Env, httpd, nghttpx, proto, max_total_conns):
        if not env.curl_is_debug():
            pytest.skip('only works for curl debug builds')
        if proto == 'h3' and not env.have_h3():
            pytest.skip("h3 not supported")
        count = 50
        max_parallel = 50
        docname = 'data-10k'
        port = env.port_for(proto)
        url = f'https://{env.domain1}:{port}/{docname}'
        run_env = os.environ.copy()
        run_env['CURL_DEBUG'] = 'multi'
        client = LocalClient(name='hx-download', env=env, run_env=run_env)
        if not client.exists():
            pytest.skip(f'example client not built: {client.name}')
        r = client.run(args=[
            '-n', f'{count}',
            '-m', f'{max_parallel}',
            '-x',  # always use a fresh connection
            '-T', str(max_total_conns),  # limit total connections
            '-r', f'{env.domain1}:{port}:127.0.0.1',
            '-V', proto, url
        ])
        r.check_exit_code(0)
        srcfile = os.path.join(httpd.docs_dir, docname)
        self.check_downloads(client, srcfile, count)
        if max_total_conns > 0:
            matched_lines = 0
            for line in r.trace_lines:
                m = re.match(r'.*The cache now contains (\d+) members.*', line)
                if m:
                    matched_lines += 1
                    n = int(m.group(1))
                    assert n <= max_total_conns
            assert matched_lines > 0

    # 2 parallel transfers, pause and resume. Load a 100 MB zip bomb from
    # the server with "Content-Encoding: gzip" that gets exploded during
    # response writing to the client. The client pauses after 1MB of unzipped
    # data and causes buffers to fill while the server sends more response
    # data.
    # * http/1.1: not much buffering is done, as curl no longer serves
    #   connections that are paused
    # * h2/h3: the server continues sending what the stream window allows,
    #   and since the single connection also carries unpaused transfers,
    #   data continues to be received and must be buffered
    @pytest.mark.parametrize("proto", ['http/1.1', 'h2', 'h3'])
    def test_02_35_pause_bomb(self, env: Env, httpd, nghttpx, proto):
        if proto == 'h3' and not env.have_h3():
            pytest.skip("h3 not supported")
        count = 2
        pause_offset = 1024 * 1024
        docname = 'bomb-100m.txt.var'
        url = f'https://localhost:{env.https_port}/{docname}'
        client = LocalClient(name='hx-download', env=env)
        if not client.exists():
            pytest.skip(f'example client not built: {client.name}')
        r = client.run(args=[
            '-n', f'{count}', '-m', f'{count}',
            '-P', f'{pause_offset}', '-V', proto, url
        ])
        r.check_exit_code(0)
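

# Illustrative sketch, not part of the upstream test file: this module is
# driven by pytest together with the fixtures from `testenv` (httpd, nghttpx).
# Assuming those services can be provisioned locally as described in the curl
# repository's tests/http documentation, a subset of the tests can also be
# run programmatically via pytest.main(), for example:
if __name__ == '__main__':
    # '-k' selects only the two basic download tests; '-v' prints one line per test
    raise SystemExit(pytest.main(['-v', '-k', 'test_02_01 or test_02_02', __file__]))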