We read every piece of feedback, and take your input very seriously.
To see all available qualifiers, see our documentation.
1 parent 308527c · commit 7d214a3 (copy full SHA for 7d214a3)
1 file changed
speechbrain/lobes/models/transformer/Conformer.py
@@ -802,10 +802,7 @@ def forward_streaming(
802
The attention values.
803
"""
804
805
- if (
806
- self.attention_type == "RelPosMHAXL"
807
- or self.attention_type == "RoPEMHA"
808
- ):
+ if self.attention_type == "RelPosMHAXL":
809
if pos_embs is None:
810
raise ValueError(
811
f"The chosen attention type for the Conformer is {self.attention_type}. For this attention type, the positional embeddings are mandatory"
0 commit comments