Skip to content
Closed
Show file tree
Hide file tree
Changes from 1 commit
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Prev Previous commit
Next Next commit
change: the default compression threshold is now zero; when the threshold is …
…zero, should_compress returns True without performing a json.dumps size check
  • Loading branch information
caue-paiva committed Sep 27, 2025
commit 45c49ae4411e2cd92137c8012bd8e815d8867cc2
6 changes: 5 additions & 1 deletion dash/_compression.py
Original file line number Diff line number Diff line change
Expand Up @@ -30,7 +30,7 @@ class BaseStoreCompressionManager(ABC):
def __init__(
self,
level: int = 6,
threshold: int = 1024,
threshold: int = 0, # by default always compress data
):
"""Initialize compression manager.

Expand All @@ -39,6 +39,8 @@ def __init__(
threshold: Minimum data size to compress (bytes)
"""
self.level = self._validate_level(level)
if threshold < 0:
raise ValueError("threshold argument should not be negative")
self.threshold = threshold

def _validate_level(self, level: int) -> int:
Expand Down Expand Up @@ -71,6 +73,8 @@ def should_compress(self, data: Any) -> bool:
"""
if data is None:
return False
if self.threshold == 0: # default threshold will always compress data
return True

# Convert to JSON to estimate size
try:
Expand Down
59 changes: 35 additions & 24 deletions tests/unit/test_compression.py
Original file line number Diff line number Diff line change
@@ -1,11 +1,9 @@
"""Unit tests for Store compression managers."""

import pytest
import json
import base64

from dash._compression import (
BaseStoreCompressionManager,
GzipCompressionManager,
DeflateCompressionManager,
BrotliCompressionManager,
Expand Down Expand Up @@ -69,7 +67,9 @@ def test_brotli_level_validation(self):
BrotliCompressionManager(level=11)

# Invalid brotli levels
with pytest.raises(ValueError, match="Brotli compression level must be 0-11"):
with pytest.raises(
ValueError, match="Brotli compression level must be 0-11"
):
BrotliCompressionManager(level=12)
except ImportError:
# Brotli not available, skip test
Expand Down Expand Up @@ -104,10 +104,13 @@ def test_should_compress_large_data(self):

def test_should_compress_unserializable(self):
"""Test that unserializable data is not compressed."""
manager = GzipCompressionManager()
manager = GzipCompressionManager(
threshold=100
) # Set threshold > 0 to test serialization check

class UnserializableClass:
pass
def __str__(self):
raise TypeError("Cannot convert to string")

unserializable = UnserializableClass()
assert not manager.should_compress(unserializable)
Expand All @@ -116,7 +119,9 @@ class UnserializableClass:
class TestCompressionRoundTrip:
"""Test compression and decompression round-trip behavior."""

@pytest.mark.parametrize("manager_class", [GzipCompressionManager, DeflateCompressionManager])
@pytest.mark.parametrize(
"manager_class", [GzipCompressionManager, DeflateCompressionManager]
)
def test_basic_round_trip(self, manager_class):
"""Test basic compression/decompression round trip."""
manager = manager_class(threshold=10) # Low threshold to ensure compression
Expand All @@ -125,7 +130,7 @@ def test_basic_round_trip(self, manager_class):
original_data = {
"numbers": list(range(100)), # Much larger dataset
"text": "Hello, world! " * 50, # Repeat text to make it larger
"nested": {"key": "value" * 20, "count": 42}
"nested": {"key": "value" * 20, "count": 42},
}

# Compress
Expand Down Expand Up @@ -202,8 +207,10 @@ def test_compression_error_fallback(self):

# Mock a compression failure by overriding _compress_bytes
original_compress = manager._compress_bytes

def failing_compress(data):
raise OSError("Compression failed")

manager._compress_bytes = failing_compress

data = {"test": "data" * 50}
Expand All @@ -225,7 +232,7 @@ def test_decompression_error_fallback(self):
"algorithm": "gzip",
"data": "invalid_base64_data!!!",
"original_size": 100,
"compressed_size": 50
"compressed_size": 50,
}

result = manager.decompress_store_data(invalid_payload)
Expand All @@ -243,7 +250,7 @@ def test_algorithm_mismatch_fallback(self):
"algorithm": "deflate", # Wrong algorithm
"data": base64.b64encode(b"test").decode("ascii"),
"original_size": 100,
"compressed_size": 50
"compressed_size": 50,
}

result = gzip_manager.decompress_store_data(mismatched_payload)
Expand Down Expand Up @@ -283,11 +290,11 @@ def test_compress_callback_outputs_multiple(self):

output_value = [
{"store": "data" * 100}, # Should be compressed
{"graph": "figure_data"} # Should not be compressed (not Store)
{"graph": "figure_data"}, # Should not be compressed (not Store)
]
output_spec = [
{"type": "Store", "property": "data"},
{"type": "Graph", "property": "figure"}
{"type": "Graph", "property": "figure"},
]

result = manager.compress_callback_outputs(output_value, output_spec)
Expand All @@ -313,7 +320,7 @@ def test_decompress_callback_inputs(self):
func_args = (compressed_data, "other_arg")
input_spec = [
{"type": "Store", "property": "data"},
{"type": "Input", "property": "value"}
{"type": "Input", "property": "value"},
]

result = manager.decompress_callback_inputs(func_args, input_spec)
Expand All @@ -331,13 +338,10 @@ def test_non_store_components_ignored(self):
"""Test that non-Store components are ignored during compression."""
manager = GzipCompressionManager(threshold=10)

output_value = [
{"data": "value1"},
{"data": "value2"}
]
output_value = [{"data": "value1"}, {"data": "value2"}]
output_spec = [
{"type": "Input", "property": "value"},
{"type": "Div", "property": "children"}
{"type": "Div", "property": "children"},
]

result = manager.compress_callback_outputs(output_value, output_spec)
Expand Down Expand Up @@ -389,7 +393,7 @@ def test_is_store_output_negative(self):
non_store_specs = [
{"type": "Input", "property": "value"},
{"type": "Store", "property": "clear_data"}, # Wrong property
{"type": "Div", "property": "children"}
{"type": "Div", "property": "children"},
]

for spec in non_store_specs:
Expand All @@ -409,7 +413,7 @@ def test_is_store_input_negative(self):
non_store_specs = [
{"type": "Input", "property": "value"},
{"type": "Store", "property": "modified_timestamp"}, # Wrong property
{"type": "State", "property": "data"}
{"type": "State", "property": "data"},
]

for spec in non_store_specs:
Expand All @@ -427,7 +431,14 @@ def test_compressed_payload_structure(self):
compressed = manager.compress_store_data(data)

# Check required fields
required_fields = ["compressed", "algorithm", "level", "data", "original_size", "compressed_size"]
required_fields = [
"compressed",
"algorithm",
"level",
"data",
"original_size",
"compressed_size",
]
for field in required_fields:
assert field in compressed

Expand Down Expand Up @@ -455,20 +466,20 @@ def test_is_compressed_payload_detection(self):
"algorithm": "gzip",
"data": "eJzLSM3JyVcozy/KSVEEABxJBD4=",
"original_size": 20,
"compressed_size": 15
"compressed_size": 15,
}
assert manager._is_compressed_payload(compressed_payload)

# Invalid payloads
invalid_payloads = [
{"compressed": False, "algorithm": "gzip", "data": "test"},
{"algorithm": "gzip", "data": "test"}, # Missing compressed field
{"compressed": True, "data": "test"}, # Missing algorithm
{"compressed": True, "data": "test"}, # Missing algorithm
{"compressed": True, "algorithm": "gzip"}, # Missing data
"not_a_dict",
None,
{"regular": "data"}
{"regular": "data"},
]

for payload in invalid_payloads:
assert not manager._is_compressed_payload(payload)
assert not manager._is_compressed_payload(payload)