#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#***************************************************************************
#                                  _   _ ____  _
#  Project                     ___| | | |  _ \| |
#                             / __| | | | |_) | |
#                            | (__| |_| |  _ <| |___
#                             \___|\___/|_| \_\_____|
#
# Copyright (C) Daniel Stenberg, <[email protected]>, et al.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution. The terms
# are also available at https://curl.se/docs/copyright.html.
#
# You may opt to use, copy, modify, merge, publish, distribute and/or sell
# copies of the Software, and permit persons to whom the Software is
# furnished to do so, under the terms of the COPYING file.
#
# This software is distributed on an "AS IS" basis, WITHOUT WARRANTY OF ANY
# KIND, either express or implied.
#
# SPDX-License-Identifier: curl
#
###########################################################################
#
import logging
import time
from datetime import timedelta
from threading import Thread

import pytest

from testenv import Env, CurlClient, ExecResult


log = logging.getLogger(__name__)


class TestGoAway:
    """Exercise graceful server shutdown (GOAWAY) while downloads run.

    Each test starts a background curl transfer of several sequential
    requests, reloads the server mid-transfer, and then verifies that
    all responses completed and that a second connection was opened
    after the graceful shutdown of the first one.
    """

    # download files sequentially with delay, reload server for GOAWAY
    def test_03_01_h2_goaway(self, env: Env, httpd, nghttpx):
        proto = 'h2'
        count = 3
        self.r = None

        def download_task():
            curl = CurlClient(env=env)
            # each response sends 10 chunks of 1024 bytes with a 100ms
            # pause in between, so every request takes about a second
            url = (f'https://{env.authority_for(env.domain1, proto)}'
                   f'/curltest/tweak?id=[0-{count - 1}]'
                   '&chunks=10&chunk_size=1024&chunk_delay=100ms')
            self.r = curl.http_download(urls=[url], alpn_proto=proto)

        worker = Thread(target=download_task)
        worker.start()
        # reload the server while the first of the `count` ~1s
        # requests is still in flight
        time.sleep(1.5)
        assert httpd.reload()
        worker.join()
        r: ExecResult = self.r
        r.check_response(count=count, http_status=200)
        # the reload shuts the connection down gracefully with GOAWAY;
        # a second connection should have been opened afterwards
        assert r.total_connects == 2
        for idx, s in enumerate(r.stats):
            if s['num_connects'] > 0:
                log.debug(f'request {idx} connected')
        # retrieving all responses should take at least `count` seconds
        assert r.duration >= timedelta(seconds=count)

    # download files sequentially with delay, reload server for GOAWAY
    @pytest.mark.skipif(condition=not Env.have_h3(), reason="h3 not supported")
    def test_03_02_h3_goaway(self, env: Env, httpd, nghttpx):
        proto = 'h3'
        if proto == 'h3' and env.curl_uses_ossl_quic():
            pytest.skip('OpenSSL QUIC fails here')
        count = 3
        self.r = None

        def download_task():
            curl = CurlClient(env=env)
            # each response sends 10 chunks of 1024 bytes with a 100ms
            # pause in between, so every request takes about a second
            url = (f'https://{env.authority_for(env.domain1, proto)}'
                   f'/curltest/tweak?id=[0-{count - 1}]'
                   '&chunks=10&chunk_size=1024&chunk_delay=100ms')
            self.r = curl.http_download(urls=[url], alpn_proto=proto)

        worker = Thread(target=download_task)
        worker.start()
        # reload the server while the first of the `count` ~1s
        # requests is still in flight
        time.sleep(1.5)
        assert nghttpx.reload(timeout=timedelta(seconds=Env.SERVER_TIMEOUT))
        worker.join()
        r: ExecResult = self.r
        # this should take `count` seconds to retrieve, maybe a little less
        assert r.duration >= timedelta(seconds=count-1)
        # the reload shuts the connection down gracefully with GOAWAY;
        # a second connection should have been opened afterwards
        r.check_response(count=count, http_status=200, connect_count=2)
        for idx, s in enumerate(r.stats):
            if s['num_connects'] > 0:
                log.debug(f'request {idx} connected')

    # download files sequentially with delay, reload server for GOAWAY
    def test_03_03_h1_goaway(self, env: Env, httpd, nghttpx):
        proto = 'http/1.1'
        count = 3
        self.r = None

        def download_task():
            curl = CurlClient(env=env)
            # each response sends 10 chunks of 1024 bytes with a 100ms
            # pause in between; --rate 30/m adds a 2s pause between
            # the sequential requests
            url = (f'https://{env.authority_for(env.domain1, proto)}'
                   f'/curltest/tweak?id=[0-{count - 1}]'
                   '&chunks=10&chunk_size=1024&chunk_delay=100ms')
            self.r = curl.http_download(urls=[url], alpn_proto=proto, extra_args=[
                '--rate', '30/m',
            ])

        worker = Thread(target=download_task)
        worker.start()
        # reload the server while the first of the `count` ~1s
        # requests is still in flight
        time.sleep(1.5)
        assert httpd.reload()
        worker.join()
        r: ExecResult = self.r
        # the reload shuts the connection down gracefully;
        # a second connection should have been opened afterwards
        r.check_response(count=count, http_status=200, connect_count=2)
        for idx, s in enumerate(r.stats):
            if s['num_connects'] > 0:
                log.debug(f'request {idx} connected')
        # retrieving all responses should take at least `count` seconds
        assert r.duration >= timedelta(seconds=count)