fix: invalid compressed file for huge file #11

Merged · 2 commits · Mar 1, 2024
15 changes: 15 additions & 0 deletions pdbstore/io/file.py
@@ -287,3 +287,18 @@ def _explore_dirs(rootdir: str, recursive: bool = False) -> List[Path]:
files_list.append(Path(file))

return files_list


def get_file_size(path: PathLike) -> int:
"""Get file size

:param path: The file path
:return: The file size in bytes, or 0 if the path is empty or the file does not exist
"""
if not path:
return 0
file_path: Path = util.str_to_path(path)
if not file_path or not file_path.exists():
return 0

return file_path.stat().st_size
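
For reference, a minimal usage sketch of the new helper (the path below is purely illustrative and not part of this PR):

from pdbstore.io import file

# Returns the size in bytes, or 0 for an empty/invalid path or a missing file.
size = file.get_file_size("build/dummylib.pdb")
if size > 0:
    print(f"candidate for compression: {size} bytes")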
12 changes: 12 additions & 0 deletions pdbstore/store/entry.py
@@ -12,6 +12,9 @@
class TransactionEntry:
"""A SymbolStore transaction entry representation"""

# File size limit to disable compression
MAX_COMPRESSED_FILE_SIZE: int = 2147482624

def __init__(
self,
store: "Store", # type: ignore[name-defined] # noqa: F821
@@ -102,6 +105,15 @@ def commit(self, force: Optional[bool] = False) -> bool:
if not dest_dir.is_dir():
dest_dir.mkdir(parents=True)

if self.compressed:
# Sanity check: only compress files smaller than 2GB, since 2GB is
# the size limit of CAB files per Microsoft documentation
if self.MAX_COMPRESSED_FILE_SIZE < io.file.get_file_size(self.source_file):
self.compressed = False
PDBStoreOutput().warning(
f"Disable compression for {self.source_file} since file size is more than 2GB"
)

if self.compressed:
PDBStoreOutput().debug(
f"Compressing {self.source_file} to {str(dest_dir / (self.file_name[:-1] + '_'))}"
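
A quick note on the constant above: 2147482624 sits 1024 bytes below 2 GiB. The exact headroom is not explained in the PR, so treat the margin as an implementation detail rather than a documented requirement:

# 2 GiB expressed in bytes
assert 2**31 == 2147483648
# MAX_COMPRESSED_FILE_SIZE leaves 1024 bytes of headroom under that boundary
assert 2147483648 - 1024 == 2147482624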
18 changes: 17 additions & 1 deletion tests/unit/test_file.py
@@ -5,6 +5,7 @@

from pdbstore.exceptions import ReadFileError
from pdbstore.io import file
from pdbstore.typing import Generator

NEWLINES_FILE_CONTENT = "first line\nsecond line"

@@ -13,7 +14,7 @@


@pytest.fixture(name="file_access")
def fixture_file_access(tmp_path, request) -> Path:
def fixture_file_access(tmp_path, request) -> Generator[Path, None, None]:
"""Generate temporary history file"""
dest = tmp_path / f"file-{time.time()}.bin"
with open(dest, "wb") as hfp:
@@ -84,3 +85,18 @@ def test_text_file_with_split_windows(file_access):

content = file.read_text_file(file_access, True)
assert content == NEWLINES_FILE_CONTENT.split("\n")


@pytest.mark.parametrize("file_access", [[TEXT_FILE_WITH_CRLF]], indirect=True)
def test_valid_file_size(file_access):
"""test valid file size behavior"""
assert file.get_file_size(file_access) > 0


@pytest.mark.parametrize(
"file_path",
[None, "", "/invalid/path"],
)
def test_invalid_file_size(file_path):
"""test invalid file size behavior"""
assert file.get_file_size(file_path) == 0
16 changes: 16 additions & 0 deletions tests/unit/test_transaction_entry.py
@@ -185,3 +185,19 @@ def test_extract_failure(tmp_path, tmp_store, test_data_native_dir, fake_process
entry.compressed = False
with pytest.raises(exceptions.CopyFileError):
entry.extract(tmp_path)


def test_large_compressed_file(tmp_store, test_data_native_dir):
"""test no compress for very large file"""
with mock.patch("pdbstore.io.file.get_file_size") as _get_file_size:
_get_file_size.return_value = TransactionEntry.MAX_COMPRESSED_FILE_SIZE + 10
entry = TransactionEntry(
tmp_store,
"dummylib.pdb",
"1972BE39B97341928816018A8ECD08D91",
test_data_native_dir / "dummylib.pdb",
True,
)
assert entry.commit() is True
assert entry.is_compressed() is False
assert entry.stored_path.exists()
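
This test avoids creating a real multi-gigabyte file by patching pdbstore.io.file.get_file_size to report a size just above the threshold. Assuming a standard pytest setup, this single case can be run with:

pytest tests/unit/test_transaction_entry.py::test_large_compressed_file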