Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Add more unittests for asyncio stream mode #185

Open
yifeikong opened this issue Dec 22, 2023 · 3 comments
Open

Add more unittests for asyncio stream mode #185

yifeikong opened this issue Dec 22, 2023 · 3 comments
Assignees
Labels
good first issue Good for newcomers help wanted Extra attention is needed
Milestone

Comments

@yifeikong
Copy link
Owner

It's not well covered.

@yifeikong yifeikong added the help wanted Extra attention is needed label Dec 22, 2023
@yifeikong yifeikong added this to the v0.6 milestone Dec 22, 2023
@gmottajr
Copy link

Hi @yifeikong, I would like to take this up.
Could you please give me more details? I need a little more context.

@yifeikong
Copy link
Owner Author

There are a number of tests for stream mode in sync version.

def test_stream_iter_content(server):
    """Streamed GET: every chunk yielded by iter_content() echoes the path."""
    with requests.Session() as session:
        stream_url = str(server.url.copy_with(path="/stream"))
        with session.stream("GET", stream_url, params={"n": "20"}) as resp:
            for part in resp.iter_content():
                assert b"path" in part
def test_stream_iter_content_break(server):
    """Breaking out of iter_content() early leaves the response usable."""
    with requests.Session() as session:
        stream_url = str(server.url.copy_with(path="/stream"))
        with session.stream("GET", stream_url, params={"n": "20"}) as resp:
            for idx, part in enumerate(resp.iter_content()):
                assert b"path" in part
                if idx == 3:
                    # Stop consuming mid-stream on purpose.
                    break
            assert resp.status_code == 200
def test_stream_iter_lines(server):
    """Each line from iter_lines() is a JSON document with the request path."""
    with requests.Session() as session:
        stream_url = str(server.url.copy_with(path="/stream"))
        with session.stream("GET", stream_url, params={"n": "20"}) as resp:
            for line in resp.iter_lines():
                payload = json.loads(line)
                assert payload["path"] == "/stream"
def test_stream_status_code(server):
    """status_code is available inside the stream context without reading the body."""
    with requests.Session() as session:
        stream_url = str(server.url.copy_with(path="/stream"))
        with session.stream("GET", stream_url, params={"n": "20"}) as resp:
            assert resp.status_code == 200
def test_stream_empty_body(server):
    """Streaming a response with no body still yields a 200 status."""
    with requests.Session() as session:
        empty_url = str(server.url.copy_with(path="/empty_body"))
        with session.stream("GET", empty_url) as resp:
            assert resp.status_code == 200
# def test_stream_large_body(server):
# with requests.Session() as s:
# url = str(server.url.copy_with(path="/stream"))
# with s.stream("GET", url, params={"n": "100000"}) as r:
# for chunk in r.iter_lines():
# data = json.loads(chunk)
# assert data["path"] == "/stream"
# # print(data["path"])
# assert r.status_code == 200
def test_stream_incomplete_read(server):
    """A truncated body raises RequestsError with code PARTIAL_FILE."""
    with requests.Session() as session:
        bad_url = str(server.url.copy_with(path="/incomplete_read"))
        with pytest.raises(requests.RequestsError) as exc_info:
            with session.stream("GET", bad_url) as resp:
                # Drain the body; the truncation error fires while reading.
                for _ in resp.iter_content():
                    continue
        assert exc_info.value.code == CurlECode.PARTIAL_FILE
def test_stream_incomplete_read_without_close(server):
    """Without an explicit close, the truncation error surfaces on first read."""
    with requests.Session() as session:
        bad_url = str(server.url.copy_with(path="/incomplete_read"))
        with pytest.raises(requests.RequestsError) as exc_info:
            resp = session.get(bad_url, stream=True)
            # The error will only be raised when you try to read it.
            for _ in resp.iter_content():
                continue
        assert exc_info.value.code == CurlECode.PARTIAL_FILE
def test_stream_redirect_loop(server):
    """Exceeding max_redirects in stream mode raises TOO_MANY_REDIRECTS."""
    with requests.Session() as session:
        loop_url = str(server.url.copy_with(path="/redirect_loop"))
        with pytest.raises(requests.RequestsError) as exc_info:
            with session.stream("GET", loop_url, max_redirects=2):
                pass
        assert exc_info.value.code == CurlECode.TOO_MANY_REDIRECTS
        assert exc_info.value.response.status_code == 301  # type: ignore
def test_stream_redirect_loop_without_close(server):
    """Header-phase errors are raised by the request call itself in stream mode."""
    with requests.Session() as session:
        loop_url = str(server.url.copy_with(path="/redirect_loop"))
        with pytest.raises(requests.RequestsError) as exc_info:
            # if the error happens receiving header, it's raised right away
            session.get(loop_url, max_redirects=2, stream=True)
        assert exc_info.value.code == CurlECode.TOO_MANY_REDIRECTS
        assert exc_info.value.response.status_code == 301  # type: ignore
def test_stream_auto_close_plain(server):
    """An unread streamed response is auto-closed; the session stays usable."""
    session = requests.Session()
    first_url = str(server.url.copy_with(path="/stream"))
    session.get(first_url, stream=True)
    # A follow-up request on the same session must still work.
    second_url = str(server.url.copy_with(path="/"))
    session.get(second_url)
def test_stream_auto_close_with_content_errors(server):
    """Body-phase errors stay silent when the stream content is never read."""
    session = requests.Session()
    # Silently fails, since the content is not read at all.
    bad_url = str(server.url.copy_with(path="/incomplete_read"))
    session.get(bad_url, stream=True)
    ok_url = str(server.url.copy_with(path="/"))
    session.get(ok_url, stream=True)
def test_stream_auto_close_with_header_errors(server):
    """A header-phase failure does not poison the session for later requests."""
    session = requests.Session()
    loop_url = str(server.url.copy_with(path="/redirect_loop"))
    with pytest.raises(requests.RequestsError) as exc_info:
        session.get(loop_url, max_redirects=2, stream=True)
    assert exc_info.value.code == CurlECode.TOO_MANY_REDIRECTS
    assert exc_info.value.response.status_code == 301  # type: ignore
    # The same session can still issue further streamed requests.
    ok_url = str(server.url.copy_with(path="/"))
    session.get(ok_url, stream=True)
def test_stream_options_persist(server):
    """Curl options set on the session persist across streamed requests."""
    session = requests.Session()
    # set here instead of when requesting
    session.curl.setopt(CurlOpt.USERAGENT, b"foo/1.0")
    echo_url = str(server.url.copy_with(path="/echo_headers"))
    resp = session.get(echo_url, stream=True)
    body = b"".join(resp.iter_lines())
    headers = json.loads(body)
    assert headers["User-agent"][0] == "foo/1.0"
def test_stream_close_early(server):
    """Closing a throttled stream early must return promptly, not drain it all."""
    session = requests.Session()
    # url = str(server.url.copy_with(path="/large"))
    # from http://xcal1.vodafone.co.uk/
    # NOTE(review): hard-coded external host makes this test network-dependent
    # and flaky offline — consider a local /large endpoint instead.
    url = "http://212.183.159.230/200MB.zip"
    resp = session.get(url, max_recv_speed=1024 * 1024, stream=True)
    started = time.time()
    for chunk_count, _ in enumerate(resp.iter_content(), start=1):
        if chunk_count > 10:
            break
    resp.close()
    elapsed = time.time() - started
    # Early close must not take anywhere near the full download time.
    assert elapsed < 50
# Does not work
# def test_max_recv_speed(server):
# s = requests.Session()
# s.curl.setopt(CurlOpt.BUFFERSIZE, 1024 * 1024)
# url = str(server.url.copy_with(path="/large"))
# # from http://xcal1.vodafone.co.uk/
# url = "http://212.183.159.230/200MB.zip"
# start = time.time()
# r = s.get(url, max_recv_speed=10 * 1024 * 1024)
# end = time.time()
# # assert len(r.content) == 20 * 1024 * 1024
# assert end - start > 10
def test_curl_infos(server):
    """Requested curl_infos are collected onto the response object."""
    session = requests.Session(curl_infos=[CurlInfo.PRIMARY_IP])
    resp = session.get(str(server.url))
    assert resp.infos[CurlInfo.PRIMARY_IP] == b"127.0.0.1"

But the async tests do not yet cover the same cases.

async def test_stream_iter_content(server):
    """Async streamed GET: every aiter_content() chunk echoes the path."""
    async with AsyncSession() as session:
        stream_url = str(server.url.copy_with(path="/stream"))
        async with session.stream("GET", stream_url, params={"n": "20"}) as resp:
            async for part in resp.aiter_content():
                assert b"path" in part
async def test_stream_iter_content_break(server):
    """Breaking out of aiter_content() early leaves the response usable."""
    async with AsyncSession() as session:
        stream_url = str(server.url.copy_with(path="/stream"))
        async with session.stream("GET", stream_url, params={"n": "20"}) as resp:
            # Manual counter: enumerate() has no async-for equivalent.
            seen = 0
            async for part in resp.aiter_content():
                seen += 1
                assert b"path" in part
                if seen == 3:
                    break
            assert resp.status_code == 200
async def test_stream_iter_lines(server):
    """Each line from aiter_lines() is a JSON document with the request path."""
    async with AsyncSession() as session:
        stream_url = str(server.url.copy_with(path="/stream"))
        async with session.stream("GET", stream_url, params={"n": "20"}) as resp:
            async for line in resp.aiter_lines():
                payload = json.loads(line)
                assert payload["path"] == "/stream"
async def test_stream_status_code(server):
    """status_code is available inside the async stream context before reading."""
    async with AsyncSession() as session:
        stream_url = str(server.url.copy_with(path="/stream"))
        async with session.stream("GET", stream_url, params={"n": "20"}) as resp:
            assert resp.status_code == 200
async def test_stream_empty_body(server):
    """Async-streaming a response with no body still yields a 200 status."""
    async with AsyncSession() as session:
        empty_url = str(server.url.copy_with(path="/empty_body"))
        async with session.stream("GET", empty_url) as resp:
            assert resp.status_code == 200
async def test_stream_atext(server):
    """atext() reads the whole streamed body; 20 requested lines come back."""
    async with AsyncSession() as session:
        stream_url = str(server.url.copy_with(path="/stream"))
        async with session.stream("GET", stream_url, params={"n": "20"}) as resp:
            body = await resp.atext()
            lines = body.split("\n")
            assert len(lines) == 20

@gmottajr
Copy link

Ok, sounds good! 🚀
Could you assign it to me, please?
I will gladly work on it right after I have it. 🧐
I am happy to contribute to improving the test suite for this critical aspect of the project. My approach would involve thoroughly understanding the existing sync mode tests, identifying the gaps in the async mode tests, and then crafting robust and comprehensive tests to cover these areas effectively. 🔍
Thank you for considering my request. 🙏

Best regards!

@yifeikong yifeikong added the good first issue Good for newcomers label Jan 1, 2024
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment
Labels
good first issue Good for newcomers help wanted Extra attention is needed
Projects
Status: Todo
Development

No branches or pull requests

2 participants