Commit
undo workaround (#29)
* revert fix

It turns out that python-tuf #2047 is not a bug after all, but rather a misconfiguration on the server side:
the issue arises if the server sends a `Content-Encoding: gzip` header when it should *not* do so (a quick check for this is sketched below).

fixes #26

* clean up imports
dennisvang authored Jul 22, 2022
1 parent 12ca6b2 commit a4c27db
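
Note (not part of this commit): the server-side misconfiguration described above can be spotted with a quick header check. The sketch below is illustrative only; the URL and file name are hypothetical, and it only assumes that target archives are pre-compressed files that must reach the client byte-for-byte.

import requests

# Hypothetical target URL; substitute the actual repository/target path.
archive_url = 'https://example.com/targets/my_app-1.0.tar.gz'

# Send an empty Accept-Encoding so the server is not invited to negotiate
# transparent compression on its own.
response = requests.head(archive_url, headers={'Accept-Encoding': ''})
encoding = response.headers.get('Content-Encoding', '')

if 'gzip' in encoding.lower():
    # If the server reports Content-Encoding: gzip for a file that is already
    # a .tar.gz archive, requests/urllib3 decompresses the body on the fly and
    # the downloaded bytes no longer match the length and hashes recorded in
    # the TUF metadata.
    print('misconfigured: drop Content-Encoding: gzip for pre-compressed targets')
else:
    print('ok: pre-compressed targets are served untouched')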
Showing 2 changed files with 11 additions and 32 deletions.
29 changes: 5 additions & 24 deletions src/tufup/client.py
@@ -8,11 +8,8 @@
 from urllib import parse
 
 import requests
-from requests.adapters import ReadTimeoutError
 from requests.auth import AuthBase
-from tuf.api.exceptions import (
-    DownloadError, SlowRetrievalError, UnsignedMetadataError
-)
+from tuf.api.exceptions import DownloadError, UnsignedMetadataError
 from tuf.api.metadata import TargetFile
 import tuf.ngclient
 # RequestsFetcher is "private", but we'll just have to live with that, for now.
@@ -345,23 +342,7 @@ def _get_session(self, url: str) -> requests.Session:
         return session
 
     def _chunks(self, response: "requests.Response") -> Iterator[bytes]:
-        """
-        Override _chunks() to:
-        - prevent automatic decoding of gzip files (python-tuf issue #2047)
-        - call progress hook
-
-        todo: adapt, if necessary, when a fix for python-tuf #2047 is released
-        """
-        try:
-            while True:
-                data = response.raw.read(
-                    amt=self.chunk_size, decode_content=False
-                )
-                if not data:
-                    break
-                self._progress(bytes_new=len(data))
-                yield data
-        except ReadTimeoutError as e:
-            raise SlowRetrievalError from e
-        finally:
-            response.close()
+        """Call progress hook for every chunk."""
+        for data in super()._chunks(response=response):
+            self._progress(bytes_new=len(data))
+            yield data
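
For context (again, not part of the commit): the simplified `_chunks` above just wraps the parent generator and reports progress per chunk. A minimal, self-contained sketch of that pattern, using made-up class names rather than tufup's actual fetcher:

from typing import Callable, Iterator, Optional


class BaseFetcher:
    """Stand-in for a base fetcher; yields fixed-size chunks."""

    chunk_size = 4096

    def _chunks(self, data: bytes) -> Iterator[bytes]:
        for start in range(0, len(data), self.chunk_size):
            yield data[start:start + self.chunk_size]


class ProgressFetcher(BaseFetcher):
    """Overrides _chunks only to add progress reporting, delegating the rest."""

    def __init__(self, hook: Optional[Callable[[int], None]] = None):
        self._hook = hook

    def _chunks(self, data: bytes) -> Iterator[bytes]:
        for chunk in super()._chunks(data):
            if self._hook:
                self._hook(len(chunk))  # report the size of this chunk
            yield chunk


if __name__ == '__main__':
    fetcher = ProgressFetcher(hook=lambda n: print(f'received {n} bytes'))
    assert b''.join(fetcher._chunks(b'x' * 10000)) == b'x' * 10000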
14 changes: 6 additions & 8 deletions tests/test_client.py
@@ -278,11 +278,10 @@ def test__chunks_without_progress_hook(self):
         chunk_count = 10
         chunks = [b'x' * chunk_size] * chunk_count
 
-        def mock_read(**kwargs):
-            if chunks:
-                return chunks.pop()
+        def mock_iter_content(*args):
+            yield from chunks
 
-        mock_response = Mock(raw=Mock(read=mock_read), close=Mock())
+        mock_response = Mock(iter_content=mock_iter_content, close=Mock())
         fetcher = AuthRequestsFetcher()
         fetcher.chunk_size = chunk_size
         # _chunks should work even if attach_progress_hook was not called
@@ -297,11 +296,10 @@ def test__chunks_with_progress_hook(self):
         chunk_count = 10
         chunks = [b'x' * chunk_size] * chunk_count
 
-        def mock_read(**kwargs):
-            if chunks:
-                return chunks.pop()
+        def mock_iter_content(*args):
+            yield from chunks
 
-        mock_response = Mock(raw=Mock(read=mock_read), close=Mock())
+        mock_response = Mock(iter_content=mock_iter_content, close=Mock())
         fetcher = AuthRequestsFetcher()
         fetcher.chunk_size = chunk_size
         # test custom progress hook
