
Commit

add a soft clamping of value before LFQ step
lucidrains committed May 8, 2024
1 parent d266deb commit 3125800
Showing 3 changed files with 5 additions and 3 deletions.
4 changes: 3 additions & 1 deletion magvit2_pytorch/magvit2_pytorch.py
@@ -1065,6 +1065,7 @@ def __init__(
 lfq_commitment_loss_weight = 1.,
 lfq_diversity_gamma = 2.5,
 quantizer_aux_loss_weight = 1.,
+lfq_soft_clamp_input_value = 10.,
 lfq_activation = nn.Identity(),
 use_fsq = False,
 fsq_levels: Optional[List[int]] = None,
@@ -1362,7 +1363,8 @@ def __init__(
 num_codebooks = num_codebooks,
 entropy_loss_weight = lfq_entropy_loss_weight,
 commitment_loss_weight = lfq_commitment_loss_weight,
-diversity_gamma = lfq_diversity_gamma
+diversity_gamma = lfq_diversity_gamma,
+soft_clamp_input_value = lfq_soft_clamp_input_value
 )

 else:
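The new `lfq_soft_clamp_input_value` hyperparameter is forwarded to LFQ as `soft_clamp_input_value`, so pre-quantization activations are bounded smoothly rather than with a hard cutoff. Below is a minimal sketch of what such a soft clamp typically looks like (a scaled tanh), assuming that is how `soft_clamp_input_value` is applied inside vector-quantize-pytorch; the authoritative behavior lives in that library (>= 1.14.20), not in this diff.

```python
import torch

def soft_clamp(x: torch.Tensor, value: float = 10.) -> torch.Tensor:
    # Smoothly squash activations into (-value, value) with a scaled tanh.
    # Near zero this is approximately the identity, so well-behaved inputs
    # pass through almost unchanged, while outliers are bounded before
    # the binarization step inside LFQ.
    return (x / value).tanh() * value

x = torch.randn(2, 18) * 50            # exaggerated, unbounded encoder output
clamped = soft_clamp(x, value = 10.)   # matches the default lfq_soft_clamp_input_value = 10.
assert clamped.abs().max() < 10.
```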
2 changes: 1 addition & 1 deletion magvit2_pytorch/version.py
@@ -1 +1 @@
-__version__ = '0.4.3'
+__version__ = '0.4.4'
2 changes: 1 addition & 1 deletion setup.py
@@ -31,7 +31,7 @@
 'pillow',
 'pytorch-custom-utils>=0.0.9',
 'numpy',
-'vector-quantize-pytorch>=1.14.10',
+'vector-quantize-pytorch>=1.14.20',
 'taylor-series-linear-attention>=0.1.5',
 'torch',
 'torchvision',
